diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..823b089 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,104 @@ +name: CI + +on: + push: + branches: [main, develop] + pull_request: + branches: [main, develop] + +jobs: + lint: + name: Lint & Format Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.61.0 + cache: false + + - name: Run linter + run: pixi run lint + + - name: Check formatting + run: pixi run format-check + + typecheck: + name: Type Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.61.0 + cache: false + + - name: Run type checker + run: pixi run typecheck + + test: + name: Test Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["py310", "py311", "py312", "py313", "py314"] + + steps: + - uses: actions/checkout@v4 + + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.61.0 + cache: false + environments: ${{ matrix.python-version }} + + - name: Run tests + run: pixi run -e ${{ matrix.python-version }} test + + - name: Run field bus tests + run: pixi run -e ${{ matrix.python-version }} test-field-bus + + test-coverage: + name: Test with Coverage + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.61.0 + cache: false + + - name: Run tests with coverage + run: pixi run test-cov + + - name: Upload coverage reports + uses: codecov/codecov-action@v4 + with: + files: ./coverage.xml + fail_ci_if_error: false + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + build: + name: Build Packages + runs-on: ubuntu-latest + needs: [lint, typecheck, test] + steps: + - uses: actions/checkout@v4 + + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 + with: + pixi-version: v0.61.0 + cache: false + + - name: Build packages + run: pixi run build diff --git a/.github/workflows/deploy-dev-release.yml b/.github/workflows/deploy-dev-release.yml index 5bfdba4..79d988d 100644 --- a/.github/workflows/deploy-dev-release.yml +++ b/.github/workflows/deploy-dev-release.yml @@ -1,103 +1,240 @@ -name: Development Artifacts +name: Development Release +# Manual trigger only - creates GitHub release with artifacts and publishes to PyPI on: - push: - branches: - - develop workflow_dispatch: inputs: - force: - description: 'Force deploy' + bump-type: + description: 'How to bump the version' + required: true + type: choice + default: 'prerelease' + options: + - prerelease # 2025.3.2a13 -> 2025.3.2a14, 2026.1.1b1 -> 2026.1.1b2 + - prepatch # 2025.3.2a13 -> 2025.3.3a1, 2026.1.1b1 -> 2026.1.2b1 + - preminor # 2025.3.2a13 -> 2025.4.0a1, 2026.1.1b1 -> 2026.2.0b1 + - custom # Use custom-version input + custom-version: + description: 'Custom version (only if bump-type is "custom", e.g., 2025.4.0b1)' required: false - default: false - type: boolean -jobs: - check-changes: - runs-on: ubuntu-latest - outputs: - any_changed: ${{ steps.changed-files.outputs.any_changed }} - steps: - - name: Checkout repository - uses: actions/checkout@v2 - with: - fetch-depth: 0 + type: string - - name: Get changed files - id: changed-files - uses: tj-actions/changed-files@v45 - with: - files: | - **.py +env: + DOCKER_IMAGE: gridappsd/app-base-container - build: +jobs: + 
dev-release: runs-on: ubuntu-latest - needs: check-changes - if: needs.check-changes.outputs.any_changed == 'true' || github.event.inputs.force == 'true' + if: github.ref == 'refs/heads/develop' + permissions: + contents: write + packages: write steps: + - name: Verify branch + run: | + echo "Running on branch: ${{ github.ref_name }}" + if [ "${{ github.ref_name }}" != "develop" ]; then + echo "Error: Development releases can only be created from the develop branch." + echo "Current branch: ${{ github.ref_name }}" + exit 1 + fi + - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.10' - - - name: Install Poetry - uses: snok/install-poetry@v1.4.1 + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 with: - virtualenvs-in-project: true - installer-parallel: true + pixi-version: v0.61.0 - - name: Bump version + - name: Determine version id: bump-version run: | - echo "Bumping version..." - ./scripts/run_on_each.sh poetry version prerelease - echo "Version bumped to $(poetry version -s)" - NEW_TAG=v$(poetry version --short) - # Finally because we want to be able to use the variable in later - # steps we set a NEW_TAG environmental variable - echo "NEW_TAG=$(echo ${NEW_TAG})" >> $GITHUB_ENV - - - name: Install dependencies - run: | - ./scripts/run_on_each.sh poetry self add poetry-plugin-export - ./scripts/poetry_install.sh + # Get current version from pixi.toml + CURRENT_VERSION=$(grep '^version' pixi.toml | head -1 | sed 's/version = "\(.*\)"/\1/') + echo "Current version: $CURRENT_VERSION" + + BUMP_TYPE="${{ inputs.bump-type }}" + + if [ "$BUMP_TYPE" == "custom" ]; then + # Use custom version + CUSTOM_VERSION="${{ inputs.custom-version }}" + if [ -z "$CUSTOM_VERSION" ]; then + echo "Error: custom-version is required when bump-type is 'custom'" + exit 1 + fi + NEW_VERSION="$CUSTOM_VERSION" + echo "Using custom version: $NEW_VERSION" + else + # Parse version components + # Handle versions like 2025.3.2a13, 2025.3.2b1, or 2025.3.2 + if [[ $CURRENT_VERSION =~ ^([0-9]+)\.([0-9]+)\.([0-9]+)((a|b)([0-9]+))?$ ]]; then + MAJOR="${BASH_REMATCH[1]}" + MINOR="${BASH_REMATCH[2]}" + PATCH="${BASH_REMATCH[3]}" + PRE_TAG="${BASH_REMATCH[5]:-a}" + PRE_NUM="${BASH_REMATCH[6]:-0}" + else + echo "Error: Cannot parse version $CURRENT_VERSION" + exit 1 + fi + + case $BUMP_TYPE in + prerelease) + NEW_PRE=$((PRE_NUM + 1)) + NEW_VERSION="${MAJOR}.${MINOR}.${PATCH}${PRE_TAG}${NEW_PRE}" + ;; + prepatch) + NEW_PATCH=$((PATCH + 1)) + NEW_VERSION="${MAJOR}.${MINOR}.${NEW_PATCH}${PRE_TAG}1" + ;; + preminor) + NEW_MINOR=$((MINOR + 1)) + NEW_VERSION="${MAJOR}.${NEW_MINOR}.0${PRE_TAG}1" + ;; + esac + fi + + echo "New version: $NEW_VERSION" + echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV + echo "NEW_TAG=v$NEW_VERSION" >> $GITHUB_ENV + + # Update version in pixi.toml + sed -i "s/^version = \".*\"/version = \"$NEW_VERSION\"/" pixi.toml + + # Update version in sub-project pyproject.toml files + sed -i "s/^version = \".*\"/version = \"$NEW_VERSION\"/" gridappsd-python-lib/pyproject.toml + sed -i "s/^version = \".*\"/version = \"$NEW_VERSION\"/" gridappsd-field-bus-lib/pyproject.toml + + - name: Update lock file + run: pixi install - - name: Build project - run: ./scripts/poetry_build.sh + - name: Run tests + run: pixi run test-all - - name: Commit bumped version + - name: Build packages + run: pixi run build + + - name: Commit and tag run: | - git 
config --global user.name 'gridappsd[bot]' - git config --global user.email 'gridappsd[bot]@users.noreply.github.com' - git commit -am "Bump version to $(poetry version -s)" - git push origin develop + git config --global user.name 'github-actions[bot]' + git config --global user.email 'github-actions[bot]@users.noreply.github.com' + git add pixi.toml gridappsd-python-lib/pyproject.toml gridappsd-field-bus-lib/pyproject.toml + git commit -m "Bump version to ${{ env.NEW_VERSION }}" + git tag ${{ env.NEW_TAG }} + git push origin HEAD:${{ github.ref_name }} + git push origin ${{ env.NEW_TAG }} - - name: Create Release - uses: ncipollo/release-action@v1.15.0 + - name: Publish to PyPI + if: github.repository_owner == 'GRIDAPPSD' env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: pixi run publish + + - name: Create GitHub Release + uses: ncipollo/release-action@v1.15.0 with: - artifacts: "dist/*.gz,dist/*.whl" + artifacts: "dist/*.tar.gz,dist/*.whl" artifactErrorsFailBuild: true generateReleaseNotes: true - commit: ${{ github.ref }} prerelease: true tag: ${{ env.NEW_TAG }} + name: "Development Release ${{ env.NEW_VERSION }}" + body: | + ## Development Release + + This is a development/pre-release version published to PyPI. + + ### Installation + + **Via pip (from PyPI):** + ```bash + pip install gridappsd-python==${{ env.NEW_VERSION }} + ``` + + **Via pip (from GitHub release):** + ```bash + pip install https://github.com/GRIDAPPSD/gridappsd-python/releases/download/${{ env.NEW_TAG }}/gridappsd_python-${{ env.NEW_VERSION }}-py3-none-any.whl + ``` + + **Via Docker:** + ```bash + docker pull ${{ env.DOCKER_IMAGE }}:${{ env.NEW_VERSION }} + ``` token: ${{ secrets.GITHUB_TOKEN }} - - name: Publish to PyPI - id: publish-to-pypi + + - name: Verify package available on PyPI + if: github.repository_owner == 'GRIDAPPSD' run: | - # This is needed, because the poetry publish will fail at the top level of the project - # so ./scripts/run_on_each.sh fails for that. - echo "POETRY_PUBLISH_OPTIONS=''" >> $GITHUB_ENV - cd gridappsd-python-lib - poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }} - poetry publish - - cd ../gridappsd-field-bus-lib - poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }} - poetry publish + for i in $(seq 1 30); do + if pip index versions gridappsd-python 2>/dev/null | grep -q "${{ env.NEW_VERSION }}" || \ + pip install --dry-run --no-deps "gridappsd-python==${{ env.NEW_VERSION }}" 2>/dev/null; then + echo "Package gridappsd-python==${{ env.NEW_VERSION }} is available on PyPI" + exit 0 + fi + echo "Attempt $i/30: Package not yet available, waiting 10s..." + sleep 10 + done + echo "Warning: Package not confirmed on PyPI after 5 minutes, proceeding anyway" + + # Docker build and push + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + if: github.repository_owner == 'GRIDAPPSD' + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_TOKEN }} + + # Build default image (Python 3.12) with develop tag + - name: Build and push Docker image (Python 3.12 - default) + uses: docker/build-push-action@v6 + with: + context: . 
+ push: ${{ github.repository_owner == 'GRIDAPPSD' }} + build-args: | + PYTHON_VERSION=3.12 + GRIDAPPSD_PYTHON_VERSION=${{ env.NEW_VERSION }} + tags: | + ${{ env.DOCKER_IMAGE }}:${{ env.NEW_VERSION }} + ${{ env.DOCKER_IMAGE }}:${{ env.NEW_VERSION }}-py312 + ${{ env.DOCKER_IMAGE }}:develop + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + # Build Python 3.10 variant + - name: Build and push Docker image (Python 3.10) + uses: docker/build-push-action@v6 + with: + context: . + push: ${{ github.repository_owner == 'GRIDAPPSD' }} + build-args: | + PYTHON_VERSION=3.10 + GRIDAPPSD_PYTHON_VERSION=${{ env.NEW_VERSION }} + tags: | + ${{ env.DOCKER_IMAGE }}:${{ env.NEW_VERSION }}-py310 + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + # Build Python 3.11 variant + - name: Build and push Docker image (Python 3.11) + uses: docker/build-push-action@v6 + with: + context: . + push: ${{ github.repository_owner == 'GRIDAPPSD' }} + build-args: | + PYTHON_VERSION=3.11 + GRIDAPPSD_PYTHON_VERSION=${{ env.NEW_VERSION }} + tags: | + ${{ env.DOCKER_IMAGE }}:${{ env.NEW_VERSION }}-py311 + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 0438b96..0000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,101 +0,0 @@ -on: [push, pull_request] - -jobs: - push: - if: github.repository_owner == 'GRIDAPPSD' || github.repository_owner == 'PNNL-CIM-Tools' - runs-on: ubuntu-latest - name: Build and push the docker container - steps: - - uses: actions/checkout@v2 - - - name: Checking environment - env: - DOCKER_IMAGE_NAME: ${{ secrets.DOCKER_IMAGE_NAME }} - run: | - if [ "x${{ env.DOCKER_IMAGE_NAME }}" == "x" ]; then - echo "Error: missing DOCKER_IMAGE_NAME" - exit 1 - fi - - - name: Log in to docker - run: | - if [ -n "${{ secrets.DOCKER_USERNAME }}" -a -n "${{ secrets.DOCKER_TOKEN }}" ]; then - - echo " " - echo "Connecting to docker" - echo "${{ secrets.DOCKER_TOKEN }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin - status=$? - if [ $status -ne 0 ]; then - echo "Error: status $status" - exit 1 - fi - fi - - - name: Build the image - env: - DOCKER_IMAGE_NAME: ${{ secrets.DOCKER_IMAGE_NAME }} - if: env.DOCKER_IMAGE_NAME != null - run: | - TAG="${GITHUB_REF#refs/heads/}" - TAG="${TAG#refs/tags/}" - TAG="${TAG//\//_}" - ORG=`echo "${{ secrets.DOCKER_PROJECT }}" | tr '[:upper:]' '[:lower:]'` - ORG="${ORG:-gridappsd}" - ORG="${ORG:+${ORG}/}" - IMAGE="${ORG}${{ env.DOCKER_IMAGE_NAME }}" - TIMESTAMP=`date +'%y%m%d%H'` - GITHASH=`git log -1 --pretty=format:"%h"` - BUILD_VERSION="${TIMESTAMP}_${GITHASH}${BRANCH:+:$TAG}" - echo "BUILD_VERSION $BUILD_VERSION" - echo "TAG ${IMAGE}:${TIMESTAMP}_${GITHASH}" - docker build --build-arg TIMESTAMP="${BUILD_VERSION}" -t ${IMAGE}:${TIMESTAMP}_${GITHASH} . - status=$? 
- if [ $status -ne 0 ]; then - echo "Error: status $status" - exit 1 - fi - - - name: Push the image - env: - DOCKER_IMAGE_NAME: ${{ secrets.DOCKER_IMAGE_NAME }} - if: env.DOCKER_IMAGE_NAME != null - run: | - TAG="${GITHUB_REF#refs/heads/}" - TAG="${TAG#refs/tags/}" - TAG="${TAG//\//_}" - ORG=`echo "${{ secrets.DOCKER_PROJECT }}" | tr '[:upper:]' '[:lower:]'` - ORG="${ORG:-gridappsd}" - ORG="${ORG:+${ORG}/}" - IMAGE="${ORG}${{ env.DOCKER_IMAGE_NAME }}" - if [ -n "${{ secrets.DOCKER_USERNAME }}" -a -n "${{ secrets.DOCKER_TOKEN }}" ]; then - - if [ -n "$TAG" -a -n "$ORG" ]; then - # Get the built container name - CONTAINER=`docker images --format "{{.Repository}}:{{.Tag}}" ${IMAGE}` - - echo "docker push ${CONTAINER}" - docker push "${CONTAINER}" - status=$? - if [ $status -ne 0 ]; then - echo "Error: status $status" - exit 1 - fi - - echo "docker tag ${CONTAINER} ${IMAGE}:$TAG" - docker tag ${CONTAINER} ${IMAGE}:$TAG - status=$? - if [ $status -ne 0 ]; then - echo "Error: status $status" - exit 1 - fi - - echo "docker push ${IMAGE}:$TAG" - docker push ${IMAGE}:$TAG - status=$? - if [ $status -ne 0 ]; then - echo "Error: status $status" - exit 1 - fi - fi - - fi diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cb7546c..f726a23 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,147 +1,183 @@ -name: Deploy Release Artifacts +name: Release on: + # Trigger on stable version tags only (no prerelease suffixes) + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+' # Matches v2025.4.0 but not v2025.4.0a1 + # Also allow manual trigger workflow_dispatch: inputs: - previous-version: - description: "Previous version number to use for release notes generation." + version: + description: 'Version to release (without v prefix, e.g., 2025.4.0)' required: true - type: bool - release-version: - description: "Version number to use for this release, do not start with v." - required: true - type: bool - publish-to: - description: "Publish to pypi or pypi-test" - required: true - type: choice - default: "pypi" - options: - - "pypi" - - "pypi-test" - -defaults: - run: - shell: bash + type: string env: - LANG: en_US.utf-8 - LC_ALL: en_US.utf-8 - PYTHON_VERSION: "3.10" + DOCKER_IMAGE: gridappsd/app-base-container jobs: - deploy-release: - runs-on: ubuntu-22.04 + release: + runs-on: ubuntu-latest permissions: - contents: write # To push a branch - pull-requests: write # To create a PR from that branch + contents: write + packages: write steps: - - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event." - - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub!" - - run: echo "🔎 The name of your branch is ${{ github.ref }} and your repository is ${{ github.repository }}." 
- - #---------------------------------------------- - # check-out repo and set-up python - #---------------------------------------------- - - name: Checkout code - uses: actions/checkout@v3 + - name: Checkout repository + uses: actions/checkout@v4 with: fetch-depth: 0 - # ref: develop token: ${{ secrets.GITHUB_TOKEN }} - - name: Set up Python ${{ env.PYTHON_VERSION }} - id: setup-python - uses: actions/setup-python@v4 + - name: Determine version + id: version + run: | + if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then + VERSION="${{ inputs.version }}" + TAG="v$VERSION" + echo "NEEDS_VERSION_UPDATE=true" >> $GITHUB_ENV + else + TAG="${{ github.ref_name }}" + VERSION="${TAG#v}" + echo "NEEDS_VERSION_UPDATE=false" >> $GITHUB_ENV + fi + + # Validate this is a stable version (no prerelease suffix) + if [[ "$VERSION" =~ (a|b|rc|dev|alpha|beta) ]]; then + echo "Error: This workflow is for stable releases only. Use 'Development Release' for prereleases." + exit 1 + fi + + echo "VERSION=$VERSION" >> $GITHUB_ENV + echo "TAG=$TAG" >> $GITHUB_ENV + echo "Releasing stable version: $VERSION" + + - name: Setup Pixi + uses: prefix-dev/setup-pixi@v0.8.1 with: - python-version: ${{ env.PYTHON_VERSION }} + pixi-version: v0.61.0 - #---------------------------------------------- - # ----- install & configure poetry ----- - #---------------------------------------------- - - name: Install Poetry - uses: snok/install-poetry@v1.3.3 - with: - virtualenvs-create: true - virtualenvs-in-project: true - installer-parallel: true - - #---------------------------------------------- - # install your root project, if required - #---------------------------------------------- - - name: Install library + - name: Update version (manual trigger only) + if: env.NEEDS_VERSION_UPDATE == 'true' run: | - ./scripts/poetry_install.sh + # Update version in pixi.toml + sed -i "s/^version = \".*\"/version = \"${{ env.VERSION }}\"/" pixi.toml - #---------------------------------------------- - # Update to new release version - #---------------------------------------------- - - name: Update Version - run: | - ./scripts/run_on_each.sh poetry version ${{ inputs.release-version }} + # Update version in sub-project pyproject.toml files + sed -i "s/^version = \".*\"/version = \"${{ env.VERSION }}\"/" gridappsd-python-lib/pyproject.toml + sed -i "s/^version = \".*\"/version = \"${{ env.VERSION }}\"/" gridappsd-field-bus-lib/pyproject.toml - NEW_TAG=v$(poetry version --short) + - name: Update lock file + if: env.NEEDS_VERSION_UPDATE == 'true' + run: pixi install - # Finally because we want to be able to use the variable in later - # steps we set a NEW_TAG environmental variable - echo "NEW_TAG=$(echo ${NEW_TAG})" >> $GITHUB_ENV + - name: Run quality checks + run: pixi run check - - name: Create build artifacts - run: | - set -x - set -u - set -e + - name: Run tests + run: pixi run test-all - # set the right version in pyproject.toml before build and publish - ./scripts/poetry_build.sh + - name: Build packages + run: pixi run build - - name: Push artifacts to github - uses: ncipollo/release-action@v1 + - name: Commit and tag (manual trigger only) + if: env.NEEDS_VERSION_UPDATE == 'true' + run: | + git config --global user.name 'github-actions[bot]' + git config --global user.email 'github-actions[bot]@users.noreply.github.com' + git add pixi.toml gridappsd-python-lib/pyproject.toml gridappsd-field-bus-lib/pyproject.toml + git commit -m "Release version ${{ env.VERSION }}" + git tag ${{ env.TAG }} + git push origin HEAD:${{ 
github.ref_name }} + git push origin ${{ env.TAG }} + + - name: Publish to PyPI + if: github.repository_owner == 'GRIDAPPSD' + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} + run: pixi run publish + + - name: Create GitHub Release + uses: ncipollo/release-action@v1.15.0 with: - artifacts: "dist/*.gz,dist/*.whl" + artifacts: "dist/*.tar.gz,dist/*.whl" artifactErrorsFailBuild: true generateReleaseNotes: true - commit: ${{ github.ref }} + prerelease: false makeLatest: true - tag: ${{ env.NEW_TAG }} + tag: ${{ env.TAG }} + name: "Release ${{ env.VERSION }}" token: ${{ secrets.GITHUB_TOKEN }} - - name: Publish to pypi - id: publish-to-pypi - if: github.repository_owner == 'GRIDAPPSD' || github.repository_owner == 'PNNL-CIM-Tools' - run: | - set -x - set -u - set -e - - # This is needed, because the poetry publish will fail at the top level of the project - # so ./scripts/run_on_each.sh fails for that. - echo "POETRY_PUBLISH_OPTIONS=''" >> $GITHUB_ENV - cd gridappsd-python-lib - poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }} - poetry publish - - cd ../gridappsd-field-bus-lib - poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }} - poetry publish - - - name: Publish to pypi test - id: publish-to-pypi-test - if: inputs.publish-to == 'pypi-test' + - name: Verify package available on PyPI + if: github.repository_owner == 'GRIDAPPSD' run: | - set -x - set -u - set -e - - ./scripts/run_on_each.sh poetry config repositories.testpypi https://test.pypi.org/legacy/ - - # This is needed, because the poetry publish will fail at the top level of the project - # so ./scripts/run_on_each.sh fails for that. - echo "POETRY_PUBLISH_OPTIONS='--repository testpypi'" >> $GITHUB_ENV - cd gridappsd-python-lib - poetry config pypi-token.testpypi ${{ secrets.PYPI_TEST_TOKEN }} - poetry publish - - cd ../gridappsd-field-bus-lib - poetry config pypi-token.testpypi ${{ secrets.PYPI_TEST_TOKEN }} - poetry publish + for i in $(seq 1 30); do + if pip index versions gridappsd-python 2>/dev/null | grep -q "${{ env.VERSION }}" || \ + pip install --dry-run --no-deps "gridappsd-python==${{ env.VERSION }}" 2>/dev/null; then + echo "Package gridappsd-python==${{ env.VERSION }} is available on PyPI" + exit 0 + fi + echo "Attempt $i/30: Package not yet available, waiting 10s..." + sleep 10 + done + echo "Warning: Package not confirmed on PyPI after 5 minutes, proceeding anyway" + + # Docker build and push + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + if: github.repository_owner == 'GRIDAPPSD' + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_TOKEN }} + + # Build default image (Python 3.12) with latest tag + - name: Build and push Docker image (Python 3.12 - default) + uses: docker/build-push-action@v6 + with: + context: . + push: ${{ github.repository_owner == 'GRIDAPPSD' }} + build-args: | + PYTHON_VERSION=3.12 + GRIDAPPSD_PYTHON_VERSION=${{ env.VERSION }} + tags: | + ${{ env.DOCKER_IMAGE }}:${{ env.VERSION }} + ${{ env.DOCKER_IMAGE }}:${{ env.VERSION }}-py312 + ${{ env.DOCKER_IMAGE }}:latest + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + # Build Python 3.10 variant + - name: Build and push Docker image (Python 3.10) + uses: docker/build-push-action@v6 + with: + context: . 
+ push: ${{ github.repository_owner == 'GRIDAPPSD' }} + build-args: | + PYTHON_VERSION=3.10 + GRIDAPPSD_PYTHON_VERSION=${{ env.VERSION }} + tags: | + ${{ env.DOCKER_IMAGE }}:${{ env.VERSION }}-py310 + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max + + # Build Python 3.11 variant + - name: Build and push Docker image (Python 3.11) + uses: docker/build-push-action@v6 + with: + context: . + push: ${{ github.repository_owner == 'GRIDAPPSD' }} + build-args: | + PYTHON_VERSION=3.11 + GRIDAPPSD_PYTHON_VERSION=${{ env.VERSION }} + tags: | + ${{ env.DOCKER_IMAGE }}:${{ env.VERSION }}-py311 + platforms: linux/amd64,linux/arm64 + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.gitignore b/.gitignore index d7dc735..eda11cf 100644 --- a/.gitignore +++ b/.gitignore @@ -111,3 +111,13 @@ ENV/ .pytest_cache /gridappsd-python.code-workspace .qodo + +# pixi +.pixi/ +pixi.lock + +# uv +uv.lock + +# ruff +.ruff_cache/ diff --git a/CHANGELOG.md b/CHANGELOG.md deleted file mode 100644 index e69de29..0000000 diff --git a/DOCKER_CONTAINER.md b/DOCKER_CONTAINER.md index b21ce9d..b65ea47 100644 --- a/DOCKER_CONTAINER.md +++ b/DOCKER_CONTAINER.md @@ -1,44 +1,144 @@ # Docker Environment for Applications -The Dockerfile in the gridappsd-python repository is the base for the -gridappsd/application-base-python:main container. It is meant to extended for applications -to utilize. An example of this is used in the -[gridappsd-sample-app](https://github.com/GRIDAPPSD/gridappsd-sample-app). +The `gridappsd/gridappsd-python` Docker image provides a Python environment with `gridappsd-python` pre-installed. Use it as a base for building GridAPPS-D client applications. -## Application Creation +**Important:** This is NOT the GridAPPS-D platform. To run the platform, use [gridappsd-docker](https://github.com/GRIDAPPSD/gridappsd-docker). -Create a new directory to hold your application. Please create a document structure as -in the gridappsd-sample-app above. +## Available Tags -The following Dockerfile is the preferred way of allowing your application to self-register -with the gridappsd server. Please follow the gridappsd-sample-app directory structure. +| Tag | Python | Description | +|-----|--------|-------------| +| `latest` | 3.12 | Latest stable release | +| `develop` | 3.12 | Latest development release | +| `` | 3.12 | Specific version (e.g., `2025.4.0`) | +| `-py310` | 3.10 | Specific version with Python 3.10 | +| `-py311` | 3.11 | Specific version with Python 3.11 | +| `-py312` | 3.12 | Specific version with Python 3.12 | -```` -# Dockerfile from gridappsd-sample-app +## Building a Client Application -# Use the base application container to allow the application to be controlled -# from the gridappsd container. -FROM gridappsd/app-container-base:main +### Basic Dockerfile -# Add the TIMESTAMP variable to capture the build information from -# the travis docker build command and add them to the image. -ARG TIMESTAMP -RUN echo $TIMESTAMP > /dockerbuildversion.txt +```dockerfile +FROM gridappsd/gridappsd-python:latest -# Pick a spot to put our application code -# (note gridappsd-python is located at /usr/src/gridappsd-python) -# and is already installed in the app-container-base environment. 
-WORKDIR /usr/src/gridappsd-sample +WORKDIR /app -# Add dependencies to the requirements.txt file before -# uncommenting the next two lines -# COPY requirements.txt ./ -# RUN RUN pip install --no-cache-dir -r requirements.txt +# Install additional dependencies (optional) +COPY requirements.txt ./ +RUN pip install --no-cache-dir -r requirements.txt -# Copy all of the source over to the container. -COPY . . +# Copy your application code +COPY my_app.py ./ -# Use a symbolic link to the sample app rather than having to -# mount it at run time (note can still be overriden in docker-compose file) -RUN ln -s /usr/src/gridappsd-sample/sample_app.config /appconfig -```` +CMD ["python", "my_app.py"] +``` + +### Build and Run + +```shell +# Build your application image +docker build -t my-gridappsd-app . + +# Run alongside GridAPPS-D platform +# (assumes gridappsd-docker is running) +docker run --rm \ + --network gridappsd-docker_default \ + -e GRIDAPPSD_ADDRESS=gridappsd \ + -e GRIDAPPSD_PORT=61613 \ + -e GRIDAPPSD_USER=app_user \ + -e GRIDAPPSD_PASSWORD=1234App \ + my-gridappsd-app +``` + +## Environment Variables + +The image sets these defaults (override at runtime): + +| Variable | Default | Description | +|----------|---------|-------------| +| `GRIDAPPSD_ADDRESS` | `gridappsd` | Hostname of GridAPPS-D server | +| `GRIDAPPSD_PORT` | `61613` | STOMP port | +| `GRIDAPPSD_USER` | `app_user` | Username | +| `GRIDAPPSD_PASSWORD` | `1234App` | Password | + +## Running with GridAPPS-D Platform + +### Option 1: Using pixi tasks + +```shell +# Start the GridAPPS-D platform +pixi run docker-up + +# Run your app +docker run --rm --network gridappsd-docker_default my-gridappsd-app + +# Stop the platform when done +pixi run docker-down +``` + +### Option 2: Using docker-compose + +Add your application to a `docker-compose.yml`: + +```yaml +version: '3' + +services: + my-app: + build: . + depends_on: + - gridappsd + environment: + - GRIDAPPSD_ADDRESS=gridappsd + networks: + - gridappsd-docker_default + +networks: + gridappsd-docker_default: + external: true +``` + +## Example Application + +See [gridappsd-sample-app](https://github.com/GRIDAPPSD/gridappsd-sample-app) for a complete example. + +### Minimal Example + +```python +# my_app.py +from gridappsd import GridAPPSD + +def on_message(header, message): + print(f"Received: {message}") + +# Connect using environment variables +gapps = GridAPPSD() +assert gapps.connected, "Failed to connect to GridAPPS-D" + +print(f"Connected to GridAPPS-D") + +# Subscribe to simulation output +gapps.subscribe('/topic/goss.gridappsd.simulation.output.>', on_message) + +# Keep running +import time +try: + while True: + time.sleep(1) +except KeyboardInterrupt: + pass +finally: + gapps.close() +``` + +## Choosing a Python Version + +If your application requires a specific Python version: + +```dockerfile +# Use Python 3.10 for compatibility with older dependencies +FROM gridappsd/gridappsd-python:2025.4.0-py310 + +# ... rest of Dockerfile +``` diff --git a/Dockerfile b/Dockerfile index 924fabd..0e022e9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,69 +1,75 @@ -## Use latest slim Python image. Note that it's built on Debian Stretch. -# `python-base` sets up all our shared environment variables -FROM python:3.8.1-slim as python-base +# GridAPPS-D Python Client Base Image +# +# This image provides a ready-to-use environment with gridappsd-python installed. +# Use it as a base for your GridAPPS-D client applications. +# +# NOTE: This is NOT the GridAPPS-D platform itself. 
To run the full platform, +# use gridappsd-docker: https://github.com/GRIDAPPSD/gridappsd-docker +# +# Available tags: +# - gridappsd/gridappsd-python:latest (Python 3.12, latest stable release) +# - gridappsd/gridappsd-python:develop (Python 3.12, latest dev release) +# - gridappsd/gridappsd-python: (Python 3.12, specific version) +# - gridappsd/gridappsd-python:-py310 (Python 3.10, specific version) +# - gridappsd/gridappsd-python:-py311 (Python 3.11, specific version) +# - gridappsd/gridappsd-python:-py312 (Python 3.12, specific version) +# +# Usage - Create a client application: +# +# FROM gridappsd/gridappsd-python:latest +# COPY requirements.txt /app/ +# RUN pip install -r /app/requirements.txt +# COPY my_app.py /app/ +# CMD ["python", "/app/my_app.py"] +# +# Build and run with GridAPPS-D platform: +# +# # Build your app +# docker build -t my-gridappsd-app . +# +# # Run alongside GridAPPS-D (assumes gridappsd-docker is running) +# docker run --rm --network gridappsd-docker_default \ +# -e GRIDAPPSD_ADDRESS=gridappsd \ +# my-gridappsd-app + +ARG PYTHON_VERSION=3.12 +FROM python:${PYTHON_VERSION}-slim ARG GRIDAPPSD_PYTHON_VERSION - # python +# Python environment settings ENV PYTHONUNBUFFERED=1 \ - # prevents python creating .pyc files PYTHONDONTWRITEBYTECODE=1 \ - \ - # pip PIP_NO_CACHE_DIR=off \ PIP_DISABLE_PIP_VERSION_CHECK=on \ - PIP_DEFAULT_TIMEOUT=100 \ - # paths - # this is where our requirements + virtual environment will live - PYSETUP_PATH="/opt/pysetup" \ - VENV_PATH="/opt/pysetup/.venv" - + PIP_DEFAULT_TIMEOUT=100 -# prepend poetry and venv to path -ENV PATH="$VENV_PATH/bin:$PATH" - -# gridappsd environment -ENV GRIDAPPSD_PORT="61613" \ - GRIDAPPSD_URI="tcp://gridappsd:${GRIDAPPSD_PORT}" \ +# GridAPPS-D connection defaults (override at runtime) +ENV GRIDAPPSD_ADDRESS="gridappsd" \ + GRIDAPPSD_PORT="61613" \ GRIDAPPSD_USER="app_user" \ - GRIDAPPSD_PASSWORD="1234App" \ - GRIDAPPSD_PYTHON_VERSION=${GRIDAPPSD_PYTHON_VERSION} - + GRIDAPPSD_PASSWORD="1234App" -# `builder-base` stage is used to build deps + create our virtual environment -FROM python-base as builder-base +# Install system dependencies RUN apt-get update \ && apt-get install --no-install-recommends -y \ - # deps for installing poetry curl \ - # deps for building python deps - build-essential - -# copy project requirement files here to ensure they will be cached. 
-WORKDIR $PYSETUP_PATH - -RUN python -m "venv" "$VENV_PATH" \ - && "$VENV_PATH/bin/pip3" install --upgrade gridappsd-python${GRIDAPPSD_PYTHON_VERSION} - -# `development` image is used during development / testing -FROM python-base as development -ENV GRIDAPPSD_ENV=production -WORKDIR $PYSETUP_PATH - -# # copy in our built poetry + venv -# COPY --from=builder-base $POETRY_HOME $POETRY_HOME -COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH + git \ + && rm -rf /var/lib/apt/lists/* -# quicker install as runtime deps are already installed -# RUN poetry install +# Create app directory +WORKDIR /app -# # will become mountpoint of our code -# WORKDIR /code +# Install gridappsd-python +# If GRIDAPPSD_PYTHON_VERSION is set (e.g., "2025.3.2"), install that exact version +# Otherwise install latest from PyPI +# --pre allows installing prerelease versions (e.g., 2025.3.2a17) +RUN pip install --upgrade pip \ + && if [ -n "$GRIDAPPSD_PYTHON_VERSION" ]; then \ + pip install --pre "gridappsd-python==$GRIDAPPSD_PYTHON_VERSION"; \ + else \ + pip install --pre gridappsd-python; \ + fi -# # `production` image used for runtime -# FROM python-base as production -# ENV GRIDAPPSD_ENV=production -# COPY --from=builder-base $PYSETUP_PATH $PYSETUP_PATH -# COPY . /code -# WORKDIR /code -CMD ["register_app"] +# Default command shows installed version +CMD ["python", "-c", "import gridappsd; print(f'gridappsd-python version: {gridappsd.__version__}')"] diff --git a/README.md b/README.md index 018b7a4..1fb341d 100644 --- a/README.md +++ b/README.md @@ -1,276 +1,312 @@ -[![Run All Pytests](https://github.com/GRIDAPPSD/gridappsd-python/actions/workflows/run-pytest.yml/badge.svg)](https://github.com/GRIDAPPSD/gridappsd-python/actions/workflows/run-pytest.yml) +[![CI](https://github.com/GRIDAPPSD/gridappsd-python/actions/workflows/ci.yml/badge.svg)](https://github.com/GRIDAPPSD/gridappsd-python/actions/workflows/ci.yml) # gridappsd-python -Python library for developing applications and services against the gridappsd api + +Python library for developing applications and services against the GridAPPS-D API. ## Requirements -The gridappsd-python library requires a python version >= 3.6 and < 4 in order to work properly (Note no testing -has been done with python 4 to date). +- Python >= 3.10, < 4.0 +- [Pixi](https://pixi.sh) (for development) ## Installation -The recommended installation of `gridappsd-python` is in a separate virtual environment. Executing the following -will create an environment called `griddapps-env`. +### Stable Releases (PyPI) + +Install the latest stable release from PyPI: ```shell -python3 -m venv gridappsd-env +pip install gridappsd-python ``` -Sourcing the gridappsd-env activates the newly created python environment. +### Development Releases (GitHub) + +Development releases are published to GitHub Releases (not PyPI). To install a development version: ```shell -source gridappsd-env/bin/activate +# Install a specific dev release directly from GitHub +pip install https://github.com/GRIDAPPSD/gridappsd-python/releases/download/v2025.3.2a14/gridappsd_python-2025.3.2a14-py3-none-any.whl + +# Or install from a specific git tag +pip install git+https://github.com/GRIDAPPSD/gridappsd-python.git@v2025.3.2a14#subdirectory=gridappsd-python-lib + +# Or install the latest from the develop branch +pip install git+https://github.com/GRIDAPPSD/gridappsd-python.git@develop#subdirectory=gridappsd-python-lib ``` -Upgrade pip to the latest (some packages require 19.0+ version of pip). 
+Browse available releases at: https://github.com/GRIDAPPSD/gridappsd-python/releases + +For detailed instructions on adding `gridappsd-python` to your project using `requirements.txt`, `pyproject.toml`, or `pixi.toml`, see the [Installation Guide](docs/INSTALLATION.md). + +### For Developers + +This project uses [Pixi](https://pixi.sh) for development environment and task management. + +#### Install Pixi ```shell -python -m pip install pip --upgrade +curl -fsSL https://pixi.sh/install.sh | bash ``` -Install the latest `gridappsd-python` and its dependencies in the virtual environment. +#### Clone and Setup ```shell -pip install gridappsd-python +git clone https://github.com/GRIDAPPSD/gridappsd-python -b develop +cd gridappsd-python + +# Install all dependencies and create the development environment +pixi install + +# Verify installation +pixi run test ``` -### Verifying things are working properly +#### Available Tasks -The following code snippet assumes you have created a gridappsd instance using the steps in -https://github.com/GRIDAPPSD/gridappsd-docker. +```shell +# List all available tasks +pixi task list + +# Run tests +pixi run test # Run main library tests +pixi run test-field-bus # Run field bus tests +pixi run test-all # Run all tests +pixi run test-cov # Run tests with coverage + +# Code quality +pixi run lint # Run linter (ruff) +pixi run lint-fix # Auto-fix lint issues +pixi run format # Format code (ruff) +pixi run format-check # Check formatting +pixi run typecheck # Run type checker (mypy) +pixi run check # Run all quality checks + +# Building +pixi run build # Build all packages +pixi run build-lib # Build main library only +pixi run build-field-bus # Build field bus library only + +# CI workflows +pixi run ci # Run full CI pipeline (lint + typecheck + tests) +pixi run release # Full release workflow + +# Docker (for integration testing) +pixi run docker-up # Start GridAPPS-D containers +pixi run docker-down # Stop containers +pixi run docker-logs # Follow container logs + +# Utilities +pixi run clean # Clean build artifacts +pixi run pre-commit-install # Install pre-commit hooks +``` -Create a test script (tester.py) with the following content. +#### Testing with Different Python Versions -```python +The project supports Python 3.10 through 3.14. You can run tests against specific versions: + +```shell +pixi run -e py310 test # Test with Python 3.10 +pixi run -e py311 test # Test with Python 3.11 +pixi run -e py312 test # Test with Python 3.12 +pixi run -e py313 test # Test with Python 3.13 +pixi run -e py314 test # Test with Python 3.14 +``` + +## Quick Start + +The following code snippet assumes you have a GridAPPS-D instance running using +[gridappsd-docker](https://github.com/GRIDAPPSD/gridappsd-docker). +```python from gridappsd import GridAPPSD def on_message_callback(header, message): print(f"header: {header} message: {message}") -# Note these should be changed on the server in a cyber secure environment! +# Note: credentials should be changed in a production environment! 
username = "app_user" password = "1234App" -# Note: there are other parameters for connecting to -# systems other than localhost +# Connect to GridAPPS-D (defaults to localhost) gapps = GridAPPSD(username=username, password=password) assert gapps.connected gapps.send('send.topic', {"foo": "bar"}) -# Note we are sending the function not executing the function in the second parameter +# Subscribe to a topic (pass the function, don't call it) gapps.subscribe('subscribe.topic', on_message_callback) -gapps.send('subcribe.topic', 'A message about subscription') +gapps.send('subscribe.topic', 'A message about subscription') +import time time.sleep(5) gapps.close() - ``` -Start up the gridappsd-docker enabled platform. Then run the following to execute the tester.py script +## Docker + +### Running the GridAPPS-D Platform + +The `docker-up` task clones and runs [gridappsd-docker](https://github.com/GRIDAPPSD/gridappsd-docker), which starts the **full GridAPPS-D platform** (including Blazegraph, MySQL, and all services): ```shell -python tester.py +# Start the full GridAPPS-D platform +pixi run docker-up + +# View logs +pixi run docker-logs + +# Stop the platform +pixi run docker-down ``` -## Application Developers +This is useful for integration testing your applications against a real GridAPPS-D instance. -### Deployment +### Client Application Base Image -Please see [DOCKER_CONTAINER.md](DOCKER_CONTAINER.md) for working within the docker application base container. +We publish a **client base image** (`gridappsd/gridappsd-python`) for building containerized GridAPPS-D applications. This image is NOT the platform itself - it's a Python environment with `gridappsd-python` pre-installed. -### Local Development +**Available tags:** + +| Tag | Description | +|-----|-------------| +| `latest` | Latest stable release (Python 3.12) | +| `develop` | Latest development release (Python 3.12) | +| `` | Specific version (e.g., `2025.4.0`) | +| `-py310` | Specific version with Python 3.10 | +| `-py311` | Specific version with Python 3.11 | +| `-py312` | Specific version with Python 3.12 | + +**Example: Building a Client Application** + +Create a `Dockerfile` for your application: + +```dockerfile +FROM gridappsd/gridappsd-python:latest -Developing applications against gridappsd using the `gridappsd-python` library should follow the same steps -as above, however with a couple of environmental variables specified. The following environmental variables are -available to provide the same context that would be available from inside the application docker container. These -are useful to know for developing your application outside of the docker context (e.g. in a python notebook). +# Install additional dependencies +COPY requirements.txt /app/ +RUN pip install -r /app/requirements.txt -***NOTE: you can also define these your ~./bashrc file so you don't have to specify them all the time*** +# Copy your application +COPY my_app.py /app/ + +CMD ["python", "/app/my_app.py"] +``` + +Build and run alongside the GridAPPS-D platform: ```shell -# export allows all processes started by this shell to have access to the global variable +# Build your app +docker build -t my-gridappsd-app . + +# Start GridAPPS-D platform (if not already running) +pixi run docker-up + +# Run your app on the same network +docker run --rm --network gridappsd-docker_default \ + -e GRIDAPPSD_ADDRESS=gridappsd \ + my-gridappsd-app +``` + +See also: [DOCKER_CONTAINER.md](DOCKER_CONTAINER.md) for more details. 
+ +## Application Developers + +### Local Development -# address where the gridappsd server is running - default localhost +When developing applications locally (outside of Docker), set these environment variables: + +```shell +# Address where the GridAPPS-D server is running (default: localhost) export GRIDAPPSD_ADDRESS=localhost -# port to connect to on the gridappsd server (the stomp client port) +# STOMP client port (default: 61613) export GRIDAPPSD_PORT=61613 -# username to connect to the gridappsd server +# Credentials export GRIDAPPSD_USER=app_user - -# password to connect to the gridappsd server export GRIDAPPSD_PASSWORD=1234App - -# Note these should be changed on the server in a cyber secure environment! ``` -The following is the same tester code as above, but with the environment variables set. The environment variables -should be set in your environment when running the application inside our docker container. +With environment variables set, you can connect without explicit credentials: ```python - from gridappsd import GridAPPSD def on_message_callback(header, message): print(f"header: {header} message: {message}") -# Create GridAPPSD object and connect to the gridappsd server. +# Connect using environment variables gapps = GridAPPSD() assert gapps.connected gapps.send('send.topic', {"foo": "bar"}) - -# Note we are sending the function not executing the function in the second parameter gapps.subscribe('subscribe.topic', on_message_callback) +gapps.send('subscribe.topic', 'A message about subscription') -gapps.send('subcribe.topic', 'A message about subscription') - +import time time.sleep(5) gapps.close() - ``` -## Developers - -This project uses poetry to build the environment for execution. Follow the instructions -https://python-poetry.org/docs/#installation to install poetry. As a developer I prefer not to have poetry installed -in the same virtual environment that my projects are in. - -Clone the github repository: - -```shell -git clone https://github.com/GRIDAPPSD/gridappsd-python -b develop -cd gridappsd-python -``` +## Testing -The following commands build and install a local wheel into an environment created just for this package. +### Running Tests ```shell -# Build the project (stores in dist directory both .tar.gz and .whl file) -poetry build - -# Install the wheel into the environment and the dev dependencies -poetry install +# Run all tests +pixi run test-all -# Install only the library dependencies -poetry install --no-dev +# Run with coverage +pixi run test-cov ``` -***Note:*** Poetry does not have a setup.py that you can install in editable mode like with pip install -e ., however -you can extract the generated setup.py file from the built tar.gz file in the dist directory. Just extract the -.tar.gz file and copy the setup.py file from the extracted directory to the root of gridappsd-python. Then you can -enable editing through pip install -e. as normal. - - -## Testing - -Testing has become an integral part of the software lifecycle. The `gridappsd-python` library has both unit and -integration tests available to be run. In order to execute these, you must have installed the gridappsd-python library -as above with dev-dependencies. - -During the testing phase the docker containers required for the tests are downloaded from -dockerhub and started. By default the `develop` tag is used to test the library using pytest. 
-One can customize the docker image tag by setting the environmental -variable `GRIDAPPSD_TAG_ENV` either by `export GRIDAPPSD_TAG_ENV=other_tag` or by executing -pytest with the following: - -```shell script +### Environment Variables for Testing -# Export environmental variables and all tests will use the same tag (other_tag) to pull from docker hub. -# Default tag is develop -export GRIDAPPSD_TAG_ENV=other_tag -pytest - -# Tests also require the username and password to be avaialable as environmental variables -# in order for them to properly run these tests -export GRIDAPPSD_USER=user -export GRIDAPPSD_PASSWORD=pass +```shell +# Docker image tag to use (default: develop) +export GRIDAPPSD_TAG_ENV=develop -pytest +# Credentials for integration tests +export GRIDAPPSD_USER=system +export GRIDAPPSD_PASSWORD=manager ``` - ***NOTE: the first running the tests will download all of the docker images associated with the - [GOSS-GridAPPS-D](http://github.com/GRIDAPPSD/GOSS-GridAPPS-D) repository. This process may take some time.*** +**Note:** The first test run will download Docker images from [GOSS-GridAPPS-D](http://github.com/GRIDAPPSD/GOSS-GridAPPS-D). This may take some time. -### Running tests created in a new project +### Using Test Fixtures in Your Project -The `gridappsd-python` library exposes a testing environment through the `gridappsd.docker_handler` module. Including the following -`conftest.py` in the root of your base test directory allows tests to reference these. Using these fixtures will start all of the -base containers required for `gridappsd` to run. +The `gridappsd-python` library provides testing fixtures through `gridappsd.docker_handler`. Create a `conftest.py` in your test directory: ```python - # conftest.py -# Create a conftest.py file in the root of the tests directory to enable usage throughout the tests directory and below. -# -# Tested project structure an layout -# -# project-folder\ -# mainmodule\ -# __init__.py -# myapplication.py -# tests\ -# conftest.py -# test_myapplication.py -# README.md - import logging import os import sys import pytest - from gridappsd import GridAPPSD, GOSS -from gridappsd.docker_handler import run_dependency_containers, run_gridappsd_container, Containers - -levels = dict( - CRITICAL=50, - FATAL=50, - ERROR=40, - WARNING=30, - WARN=30, - INFO=20, - DEBUG=10, - NOTSET=0 -) +from gridappsd.docker_handler import run_dependency_containers, run_gridappsd_container -# Get string representation of the log level passed LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO") - -# Make sure the level passed is one of the valid levels. 
-if LOG_LEVEL not in levels.keys(): - raise AttributeError("Invalid LOG_LEVEL environmental variable set.") - -# Set the numeric version of log level to pass to the basicConfig function -LOG_LEVEL = levels[LOG_LEVEL] - -logging.basicConfig(stream=sys.stdout, level=LOG_LEVEL, - format="%(asctime)s|%(levelname)s|%(name)s|%(message)s") -logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO) -logging.getLogger("docker.utils.config").setLevel(logging.INFO) -logging.getLogger("docker.auth").setLevel(logging.INFO) - +logging.basicConfig( + stream=sys.stdout, + level=getattr(logging, LOG_LEVEL), + format="%(asctime)s|%(levelname)s|%(name)s|%(message)s" +) STOP_CONTAINER_AFTER_TEST = os.environ.get('GRIDAPPSD_STOP_CONTAINERS_AFTER_TESTS', True) @pytest.fixture(scope="module") def docker_dependencies(): - print("Docker dependencies") - # Containers.reset_all_containers() - with run_dependency_containers(stop_after=STOP_CONTAINER_AFTER_TEST) as dep: yield dep - print("Cleanup docker dependencies") @pytest.fixture @@ -279,8 +315,7 @@ def gridappsd_client(request, docker_dependencies): gappsd = GridAPPSD() gappsd.connect() assert gappsd.connected - models = gappsd.query_model_names() - assert models is not None + if request.cls is not None: request.cls.gridappsd_client = gappsd yield gappsd @@ -294,34 +329,41 @@ def goss_client(docker_dependencies): goss = GOSS() goss.connect() assert goss.connected - yield goss - ``` -Using the above fixtures from inside a test module and test function looks like the following: +Example test using the fixtures: ```python +import os +from unittest import mock +from gridappsd import ProcessStatusEnum -# Example test function using the gridappsd_client fixture - -@mock.patch.dict(os.environ, {"GRIDAPPSD_APPLICATION_ID": "helics_goss_bridge.py"}) +@mock.patch.dict(os.environ, {"GRIDAPPSD_APPLICATION_ID": "my_app.py"}) def test_gridappsd_status(gridappsd_client): gappsd = gridappsd_client - assert "helics_goss_bridge.py" == gappsd.get_application_id() + assert "my_app.py" == gappsd.get_application_id() assert gappsd.get_application_status() == ProcessStatusEnum.STARTING.value - assert gappsd.get_service_status() == ProcessStatusEnum.STARTING.value - gappsd.set_application_status("RUNNING") - assert gappsd.get_service_status() == ProcessStatusEnum.RUNNING.value + gappsd.set_application_status("RUNNING") assert gappsd.get_application_status() == ProcessStatusEnum.RUNNING.value +``` - gappsd.set_service_status("COMPLETE") - assert gappsd.get_service_status() == ProcessStatusEnum.COMPLETE.value - assert gappsd.get_application_status() == ProcessStatusEnum.COMPLETE.value +## Project Structure - # Invalid - gappsd.set_service_status("Foo") - assert gappsd.get_service_status() == ProcessStatusEnum.COMPLETE.value - assert gappsd.get_application_status() == ProcessStatusEnum.COMPLETE.value ``` +gridappsd-python/ +├── gridappsd-python-lib/ # Main library +│ ├── gridappsd/ # Source code +│ └── tests/ # Tests +├── gridappsd-field-bus-lib/ # Field bus library +│ ├── gridappsd_field_bus/ # Source code +│ └── tests/ # Tests +├── pixi.toml # Pixi configuration +├── pixi.lock # Lock file +└── .github/workflows/ # CI workflows +``` + +## License + +BSD-3-Clause diff --git a/compute_req_log.txt b/compute_req_log.txt new file mode 100644 index 0000000..22b9155 --- /dev/null +++ b/compute_req_log.txt @@ -0,0 +1,2 @@ +[COMPUTE_REQ] 2026-02-06 14:28:12,596 - Total message size summary: +[COMPUTE_REQ] 2026-02-06 14:28:12,596 - Function call counts summary: diff --git 
a/docs/INSTALLATION.md b/docs/INSTALLATION.md new file mode 100644 index 0000000..08dabc6 --- /dev/null +++ b/docs/INSTALLATION.md @@ -0,0 +1,186 @@ +# Installation Guide + +This guide covers different ways to add `gridappsd-python` as a dependency in your project. + +## Quick Install + +```shell +pip install gridappsd-python +``` + +## Adding as a Dependency + +### requirements.txt + +```txt +# Stable release from PyPI +gridappsd-python>=2025.3.0 + +# Or pin to a specific version +gridappsd-python==2025.3.2 + +# Development release from GitHub +gridappsd-python @ https://github.com/GRIDAPPSD/gridappsd-python/releases/download/v2025.3.2a14/gridappsd_python-2025.3.2a14-py3-none-any.whl + +# Or install from a git tag +gridappsd-python @ git+https://github.com/GRIDAPPSD/gridappsd-python.git@v2025.3.2a14#subdirectory=gridappsd-python-lib + +# Or install from the develop branch (latest development code) +gridappsd-python @ git+https://github.com/GRIDAPPSD/gridappsd-python.git@develop#subdirectory=gridappsd-python-lib +``` + +### pyproject.toml (PEP 621 / pip) + +```toml +[project] +dependencies = [ + # Stable release from PyPI + "gridappsd-python>=2025.3.0", +] + +# For development releases, use optional dependencies +[project.optional-dependencies] +dev = [ + "gridappsd-python @ git+https://github.com/GRIDAPPSD/gridappsd-python.git@develop#subdirectory=gridappsd-python-lib", +] +``` + +### pyproject.toml (Poetry) + +```toml +[tool.poetry.dependencies] +# Stable release from PyPI +gridappsd-python = "^2025.3.0" + +# Or pin to specific version +gridappsd-python = "2025.3.2" + +# Development release from git +gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", branch = "develop", subdirectory = "gridappsd-python-lib" } + +# Or from a specific tag +gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", tag = "v2025.3.2a14", subdirectory = "gridappsd-python-lib" } +``` + +### pixi.toml + +```toml +[pypi-dependencies] +# Stable release from PyPI +gridappsd-python = ">=2025.3.0" + +# Or pin to specific version +gridappsd-python = "==2025.3.2" + +# Development release from git +gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", branch = "develop", subdirectory = "gridappsd-python-lib" } + +# Or from a specific tag +gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", tag = "v2025.3.2a14", subdirectory = "gridappsd-python-lib" } + +# Or from a specific commit +gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", rev = "abc1234", subdirectory = "gridappsd-python-lib" } +``` + +## Including gridappsd-field-bus + +The `gridappsd-field-bus` package provides additional field bus functionality and depends on `gridappsd-python`. 
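+
+Once both packages are installed (using any of the declarations below), a quick import check confirms that they resolve together. This is only a sanity-check sketch; `MessageBusDefinition` is one of the interfaces re-exported by `gridappsd_field_bus`:
+
+```python
+# Sanity check: both distributions are importable in the same environment.
+import gridappsd
+from gridappsd_field_bus import MessageBusDefinition
+
+print(f"gridappsd-python version: {gridappsd.__version__}")
+print(f"field bus interface available: {MessageBusDefinition.__name__}")
+```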
+
+### requirements.txt
+
+```txt
+# Both packages from PyPI
+gridappsd-python>=2025.3.0
+gridappsd-field-bus>=2025.3.0
+
+# Or from GitHub releases
+gridappsd-python @ https://github.com/GRIDAPPSD/gridappsd-python/releases/download/v2025.3.2a14/gridappsd_python-2025.3.2a14-py3-none-any.whl
+gridappsd-field-bus @ https://github.com/GRIDAPPSD/gridappsd-python/releases/download/v2025.3.2a14/gridappsd_field_bus-2025.3.2a14-py3-none-any.whl
+```
+
+### pyproject.toml (PEP 621)
+
+```toml
+[project]
+dependencies = [
+    "gridappsd-python>=2025.3.0",
+    "gridappsd-field-bus>=2025.3.0",
+]
+```
+
+### pyproject.toml (Poetry)
+
+```toml
+[tool.poetry.dependencies]
+gridappsd-python = "^2025.3.0"
+gridappsd-field-bus = "^2025.3.0"
+
+# Or from git (both packages)
+gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", branch = "develop", subdirectory = "gridappsd-python-lib" }
+gridappsd-field-bus = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", branch = "develop", subdirectory = "gridappsd-field-bus-lib" }
+```
+
+### pixi.toml
+
+```toml
+[pypi-dependencies]
+gridappsd-python = ">=2025.3.0"
+gridappsd-field-bus = ">=2025.3.0"
+
+# Or from git
+gridappsd-python = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", branch = "develop", subdirectory = "gridappsd-python-lib" }
+gridappsd-field-bus = { git = "https://github.com/GRIDAPPSD/gridappsd-python.git", branch = "develop", subdirectory = "gridappsd-field-bus-lib" }
+```
+
+## Version Specifiers
+
+| Specifier | Meaning |
+|-----------|---------|
+| `>=2025.3.0` | Version 2025.3.0 or newer |
+| `^2025.3.0` | >=2025.3.0, <2026.0.0 (Poetry caret) |
+| `~=2025.3.0` | Compatible release (>=2025.3.0, <2025.4.0) |
+| `==2025.3.2` | Exact version |
+| `>=2025.3.0,<2026.0.0` | Explicit range |
+
+## Verifying Installation
+
+```python
+import gridappsd
+print(gridappsd.__version__)
+
+# Test connection (requires running GridAPPS-D instance)
+from gridappsd import GridAPPSD
+gapps = GridAPPSD()
+print(f"Connected: {gapps.connected}")
+```
+
+## Troubleshooting
+
+### Git dependency not installing
+
+If you get errors with git dependencies, ensure you have git installed and accessible:
+
+```shell
+git --version
+```
+
+### SSL certificate errors
+
+If you encounter SSL errors when installing from GitHub, do **not** bypass certificate verification, as this can expose you to security risks.
+
+Instead, try the following steps:
+
+- Ensure your system's CA certificates are up to date. On Ubuntu/Debian, run:
+  ```shell
+  sudo apt-get update && sudo apt-get install --reinstall ca-certificates
+  ```
+- Upgrade `pip` and `certifi` to their latest versions: `python -m pip install --upgrade pip certifi`.
+
+### Subdirectory not found
+
+When using git dependencies with subdirectories, ensure the syntax is correct:
+
+```shell
+# Correct
+pip install "git+https://github.com/GRIDAPPSD/gridappsd-python.git@develop#subdirectory=gridappsd-python-lib"
+
+# Wrong (missing subdirectory)
+pip install "git+https://github.com/GRIDAPPSD/gridappsd-python.git@develop"
+```
diff --git a/gridappsd-field-bus-lib/README.md b/gridappsd-field-bus-lib/README.md
index e69de29..95c5e03 100644
--- a/gridappsd-field-bus-lib/README.md
+++ b/gridappsd-field-bus-lib/README.md
@@ -0,0 +1,41 @@
+# gridappsd-field-bus
+
+A distributed field bus communication framework for the [GridAPPS-D](https://gridappsd.readthedocs.io) platform. Provides hierarchical agent-based communication across power grid field devices, enabling decentralized control and context management at the substation, feeder, switch area, and secondary area levels.
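+
+The agent hierarchy mirrors the grid topology (see Features below). The sketch here shows only the import surface of those classes; constructor arguments are intentionally omitted because they depend on your message bus configuration and grid topology:
+
+```python
+# Import sketch only; agent construction requires deployment-specific
+# message bus and topology configuration that is not shown here.
+from gridappsd_field_bus import MessageBusDefinition, FieldMessageBus
+from gridappsd_field_bus.field_interface.agents import (
+    SubstationAgent,
+    FeederAgent,
+    SwitchAreaAgent,
+    SecondaryAreaAgent,
+)
+```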
+ +## Features + +- **Message Bus Abstraction** - Pluggable message bus architecture with support for STOMP, GridAPPS-D, and VOLTTRON connection types +- **Distributed Agents** - Hierarchical agent framework (`SubstationAgent`, `FeederAgent`, `SwitchAreaAgent`, `SecondaryAreaAgent`) for multi-level grid communication +- **Context Management** - Topology-aware context managers that provide neighborhood information to distributed agents via CIM-Graph +- **Field Proxy Forwarding** - Bridges field device buses to the operational technology (OT) bus when direct connections are unavailable +- **Protocol Support** - Extensible protocol transformers for IEEE 2030.5 and DNP3 field protocols + +## Installation + +```bash +pip install gridappsd-field-bus +``` + +## Requirements + +- Python >= 3.10 +- [gridappsd-python](https://pypi.org/project/gridappsd-python/) +- [cim-graph](https://pypi.org/project/cim-graph/) + +## CLI Commands + +**Start the field proxy forwarder:** + +```bash +start-field-bus-forwarder --username app_user --password 1234App +``` + +**Start centralized context managers:** + +```bash +context_manager --feeder_id --simulation_id +``` + +## Documentation + +See the main [GridAPPS-D Python repository](https://github.com/GRIDAPPSD/gridappsd-python) for full documentation. diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/__init__.py b/gridappsd-field-bus-lib/gridappsd_field_bus/__init__.py index 623136e..279df0f 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/__init__.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/__init__.py @@ -1,7 +1,7 @@ from gridappsd_field_bus.field_interface.interfaces import ( - MessageBusDefinitions, - MessageBusDefinition, - FieldMessageBus, - FieldProtocol, - DeviceFieldInterface -) \ No newline at end of file + MessageBusDefinitions as MessageBusDefinitions, + MessageBusDefinition as MessageBusDefinition, + FieldMessageBus as FieldMessageBus, + FieldProtocol as FieldProtocol, + DeviceFieldInterface as DeviceFieldInterface, +) diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/__init__.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/__init__.py index af50827..deb0b9d 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/__init__.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/__init__.py @@ -1,7 +1,13 @@ from typing import List -from gridappsd_field_bus.field_interface.agents.agents import (FeederAgent, DistributedAgent, - CoordinatingAgent, SwitchAreaAgent, - SecondaryAreaAgent, SubstationAgent, compute_req) +from gridappsd_field_bus.field_interface.agents.agents import ( + FeederAgent, + DistributedAgent, + CoordinatingAgent, + SwitchAreaAgent, + SecondaryAreaAgent, + SubstationAgent, + compute_req, +) __all__: List[str] = ["FeederAgent", "DistributedAgent", "CoordinatingAgent"] diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/agents.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/agents.py index ac02eb5..1fd7e96 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/agents.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/agents/agents.py @@ -1,28 +1,24 @@ +from __future__ import annotations import dataclasses import importlib -import json import logging -from dataclasses import dataclass, field +from dataclasses import dataclass from datetime import datetime -from typing import Dict +from typing import Any import time -import os 
from functools import wraps import sys import inspect import atexit -from cimgraph.databases import ConnectionParameters from cimgraph.databases.gridappsd import GridappsdConnection -from cimgraph.models import FeederModel from cimgraph.models.distributed_area import DistributedArea from gridappsd import DifferenceBuilder import gridappsd.topics as t from gridappsd_field_bus.field_interface.context import LocalContext -from gridappsd_field_bus.field_interface.gridappsd_field_bus import GridAPPSDMessageBus -from gridappsd_field_bus.field_interface.interfaces import (FieldMessageBus, MessageBusDefinition, MessageBusFactory) +from gridappsd_field_bus.field_interface.interfaces import FieldMessageBus, MessageBusDefinition, MessageBusFactory CIM_PROFILE = None @@ -33,17 +29,22 @@ decorator_logger = logging.getLogger("decorator_logger") decorator_logger.setLevel(logging.INFO) file_handler = logging.FileHandler("compute_req_log.txt") # Log file name -formatter = logging.Formatter('[COMPUTE_REQ] %(asctime)s - %(message)s') +formatter = logging.Formatter("[COMPUTE_REQ] %(asctime)s - %(message)s") file_handler.setFormatter(formatter) decorator_logger.addHandler(file_handler) +# Tracking dictionaries for compute_req decorator +function_call_counts: dict[str, int] = {} +message_size_totals: dict[str, int] = {} + + def set_cim_profile(cim_profile: str, iec61970_301: int): global CIM_PROFILE global IEC61970_301 global cim CIM_PROFILE = cim_profile IEC61970_301 = iec61970_301 - cim = importlib.import_module('cimgraph.data_profile.' + cim_profile) + cim = importlib.import_module("cimgraph.data_profile." + cim_profile) @dataclass @@ -51,8 +52,9 @@ class AgentRegistrationDetails: agent_id: str app_id: str description: str - upstream_message_bus_id: FieldMessageBus.id - downstream_message_bus_id: FieldMessageBus.id + upstream_message_bus_id: str + downstream_message_bus_id: str + @atexit.register def call_counter_report(): @@ -60,34 +62,37 @@ def call_counter_report(): for func_name, count in function_call_counts.items(): decorator_logger.info(f"{func_name} was called {count} time(s)") + @atexit.register def message_size_report(): decorator_logger.info("Total message size summary:") for func_name, total_size in message_size_totals.items(): decorator_logger.info(f"{func_name} total message size: {total_size} bytes") + def compute_req(cls): functions = [ - '__init__', - #'on_measurement', - 'on_upstream_message', - 'on_downstream_message', - 'on_request', - 'publish_upstream', - 'publish_downstream', - 'send_control_command' + "__init__", + #'on_measurement', + "on_upstream_message", + "on_downstream_message", + "on_request", + "publish_upstream", + "publish_downstream", + "send_control_command", ] def call_counter(func): name = func.__qualname__ - + @wraps(func) def wrapper(*args, **kwargs): - if args[0].agent_id+'.'+name not in function_call_counts: - function_call_counts[args[0].agent_id+'.'+name] = 0 - function_call_counts[args[0].agent_id+'.'+name] += 1 - #decorator_logger.info(f"{name} called {function_call_counts[name]} times") + if args[0].agent_id + "." + name not in function_call_counts: + function_call_counts[args[0].agent_id + "." + name] = 0 + function_call_counts[args[0].agent_id + "." 
+ name] += 1 + # decorator_logger.info(f"{name} called {function_call_counts[name]} times") return func(*args, **kwargs) + return wrapper def timed(func): @@ -97,9 +102,12 @@ def wrapper(*args, **kwargs): result = func(*args, **kwargs) end = time.perf_counter() class_name = args[0].__class__.__name__ if args else "" - if func.__name__ == '__init__': - decorator_logger.info(f"{class_name}.{func.__name__}.{args[0].agent_id} took: {end - start:.6f} seconds") + if func.__name__ == "__init__": + decorator_logger.info( + f"{class_name}.{func.__name__}.{args[0].agent_id} took: {end - start:.6f} seconds" + ) return result + return wrapper def get_deep_size(func): @@ -119,20 +127,23 @@ def deep_size(obj, seen=None): size += sum(deep_size(k, seen) + deep_size(v, seen) for k, v in obj.items()) elif isinstance(obj, (list, tuple, set, frozenset)): size += sum(deep_size(i, seen) for i in obj) - elif hasattr(obj, '__dict__'): + elif hasattr(obj, "__dict__"): for attr_name, attr_value in vars(obj).items(): - if attr_name in ['feeder_area', 'switch_area', 'secondary_area']: + if attr_name in ["feeder_area", "switch_area", "secondary_area"]: continue size += deep_size(attr_value, seen) - elif hasattr(obj, '__slots__'): + elif hasattr(obj, "__slots__"): size += sum(deep_size(getattr(obj, slot), seen) for slot in obj.__slots__ if hasattr(obj, slot)) return size self = args[0] obj_size = deep_size(self) - decorator_logger.info(f"{self.__class__.__name__}.{func.__name__}.{args[0].agent_id} size is: {obj_size} bytes") + decorator_logger.info( + f"{self.__class__.__name__}.{func.__name__}.{args[0].agent_id} size is: {obj_size} bytes" + ) return result + return wrapper def get_graph_size(func): @@ -140,47 +151,55 @@ def get_graph_size(func): def wrapper(*args, **kwargs): self = args[0] result = func(*args, **kwargs) - area_names = ['feeder_area', 'switch_area', 'secondary_area'] + area_names = ["feeder_area", "switch_area", "secondary_area"] area_found = False for name in area_names: area_dict = getattr(self, name, None) - if area_dict is not None and hasattr(area_dict, 'graph'): + if area_dict is not None and hasattr(area_dict, "graph"): graph_keys = [key.__name__ for key in list(area_dict.graph.keys())] size = len(area_dict.graph.keys()) - decorator_logger.info(f"{self.__class__.__name__}.{func.__name__}.{args[0].agent_id} length of graph: {size}") - decorator_logger.info(f"{self.__class__.__name__}.{name}.{args[0].agent_id} graph keys: {graph_keys}") + decorator_logger.info( + f"{self.__class__.__name__}.{func.__name__}.{args[0].agent_id} length of graph: {size}" + ) + decorator_logger.info( + f"{self.__class__.__name__}.{name}.{args[0].agent_id} graph keys: {graph_keys}" + ) area_found = True break if not area_found: - decorator_logger.error(f"{class_name}.{func.__name__}.{args[0].agent_id} No area dictionary (feeder/switch/secondary) found in {self.__class__.__name__}") + decorator_logger.error( + f"{class_name}.{func.__name__}.{args[0].agent_id} No area dictionary (feeder/switch/secondary) found in {self.__class__.__name__}" + ) return result + return wrapper def log_message_size(func): name = func.__qualname__ - + @wraps(func) def wrapper(*args, **kwargs): sig = inspect.signature(func) bound_args = sig.bind(*args, **kwargs) bound_args.apply_defaults() - if 'message' in bound_args.arguments: - msg = bound_args.arguments['message'] + if "message" in bound_args.arguments: + msg = bound_args.arguments["message"] size = sys.getsizeof(msg) - if args[0].agent_id+'.'+name not in message_size_totals: - 
message_size_totals[args[0].agent_id+'.'+name] = 0 - message_size_totals[args[0].agent_id+'.'+name] += size + if args[0].agent_id + "." + name not in message_size_totals: + message_size_totals[args[0].agent_id + "." + name] = 0 + message_size_totals[args[0].agent_id + "." + name] += size - if 'differenceBuilder' in bound_args.arguments: - msg = bound_args.arguments['differenceBuilder'] + if "differenceBuilder" in bound_args.arguments: + msg = bound_args.arguments["differenceBuilder"] size = sys.getsizeof(msg) - if args[0].agent_id+'.'+name not in message_size_totals: - message_size_totals[args[0].agent_id+'.'+name] = 0 - message_size_totals[args[0].agent_id+'.'+name] += size + if args[0].agent_id + "." + name not in message_size_totals: + message_size_totals[args[0].agent_id + "." + name] = 0 + message_size_totals[args[0].agent_id + "." + name] += size return func(*args, **kwargs) + return wrapper # Decorate the relevant functions @@ -188,7 +207,7 @@ def wrapper(*args, **kwargs): if hasattr(cls, attr_name): original_func = getattr(cls, attr_name) if callable(original_func): - if attr_name == '__init__': + if attr_name == "__init__": decorated = get_deep_size(get_graph_size(timed(original_func))) else: decorated = call_counter(log_message_size(timed(original_func))) @@ -196,42 +215,42 @@ def wrapper(*args, **kwargs): return cls -class DistributedAgent: - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config, - agent_area_dict=None, - simulation_id=None, - cim_profile: str = None): +class DistributedAgent: + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config, + agent_area_dict=None, + simulation_id=None, + cim_profile: str = None, + ): """ Creates a DistributedAgent object that connects to the specified message buses and gets context based on feeder id and area id. 
""" _log.debug(f"Creating DistributedAgent: {self.__class__.__name__}") - self.upstream_message_bus = None - self.downstream_message_bus = None + self.upstream_message_bus: FieldMessageBus | None = None + self.downstream_message_bus: FieldMessageBus | None = None self.simulation_id = simulation_id - self.context = None + self.context: dict[str, Any] | None = None self.connection = GridappsdConnection() self.connection.cim_profile = cim_profile - self.app_id = agent_config['app_id'] - self.description = agent_config['description'] - dt = datetime.now() - ts = datetime.timestamp(dt) - if ('context_manager' not in self.app_id): + self.app_id = agent_config["app_id"] + self.description = agent_config["description"] + if "context_manager" not in self.app_id: self.agent_id = "da_" + self.app_id else: - self.agent_id = downstream_message_bus_def.id + '.context_manager' + self.agent_id = downstream_message_bus_def.id + ".context_manager" self.agent_area_dict = agent_area_dict if upstream_message_bus_def is not None: self.upstream_message_bus = MessageBusFactory.create(upstream_message_bus_def) - + if downstream_message_bus_def is not None: self.downstream_message_bus = MessageBusFactory.create(downstream_message_bus_def) @@ -241,7 +260,6 @@ def __init__(self, self._connect() def _connect(self): - if self.upstream_message_bus is not None: self.upstream_message_bus.connect() if self.downstream_message_bus is not None: @@ -249,23 +267,21 @@ def _connect(self): if self.downstream_message_bus is None and self.upstream_message_bus is None: raise ValueError("Either upstream or downstream bus must be specified!") - if ('context_manager' not in self.app_id): + if "context_manager" not in self.app_id: self.agent_id = "da_" + self.app_id + "_" + self.downstream_message_bus.id if self.agent_area_dict is None: context = LocalContext.get_context_by_message_bus(self.downstream_message_bus) - self.agent_area_dict = context['data'] + self.agent_area_dict = context["data"] self.subscribe_to_measurement() self.subscribe_to_messages() self.subscribe_to_requests() - if ('context_manager' not in self.app_id): - LocalContext.register_agent(self.downstream_message_bus, self.upstream_message_bus, - self) + if "context_manager" not in self.app_id: + LocalContext.register_agent(self.downstream_message_bus, self.upstream_message_bus, self) def disconnect(self): - if self.upstream_message_bus is not None: self.upstream_message_bus.disconnect() if self.downstream_message_bus is not None: @@ -274,201 +290,206 @@ def disconnect(self): def subscribe_to_measurement(self): if self.simulation_id is None: self.downstream_message_bus.subscribe( - t.field_output_topic(self.downstream_message_bus.id), self.on_measurement) + t.field_output_topic(self.downstream_message_bus.id), self.on_measurement + ) else: topic = t.field_output_topic(self.downstream_message_bus.id, self.simulation_id) _log.debug(f"subscribing to simulation output on topic {topic}") self.downstream_message_bus.subscribe(topic, self.on_simulation_output) def subscribe_to_messages(self): - self.downstream_message_bus.subscribe( - t.field_message_bus_topic(self.downstream_message_bus.id), self.on_downstream_message) + t.field_message_bus_topic(self.downstream_message_bus.id), self.on_downstream_message + ) self.upstream_message_bus.subscribe( - t.field_message_bus_topic(self.upstream_message_bus.id), self.on_upstream_message) + t.field_message_bus_topic(self.upstream_message_bus.id), self.on_upstream_message + ) _log.debug( f"Subscribing to messages on application topics: 
\n {t.field_message_bus_app_topic(self.downstream_message_bus.id, self.app_id)} \ \n {t.field_message_bus_app_topic(self.upstream_message_bus.id, self.app_id)}" ) self.downstream_message_bus.subscribe( - t.field_message_bus_app_topic(self.downstream_message_bus.id, self.app_id), - self.on_downstream_message) + t.field_message_bus_app_topic(self.downstream_message_bus.id, self.app_id), self.on_downstream_message + ) self.upstream_message_bus.subscribe( - t.field_message_bus_app_topic(self.upstream_message_bus.id, self.app_id), - self.on_upstream_message) + t.field_message_bus_app_topic(self.upstream_message_bus.id, self.app_id), self.on_upstream_message + ) - if ('context_manager' not in self.app_id): + if "context_manager" not in self.app_id: _log.debug( f"Subscribing to message on agents topics: \n {t.field_message_bus_agent_topic(self.downstream_message_bus.id, self.agent_id)} \ \n {t.field_message_bus_agent_topic(self.upstream_message_bus.id, self.agent_id)}" ) self.downstream_message_bus.subscribe( t.field_message_bus_agent_topic(self.downstream_message_bus.id, self.agent_id), - self.on_downstream_message) + self.on_downstream_message, + ) self.upstream_message_bus.subscribe( - t.field_message_bus_agent_topic(self.upstream_message_bus.id, self.agent_id), - self.on_upstream_message) + t.field_message_bus_agent_topic(self.upstream_message_bus.id, self.agent_id), self.on_upstream_message + ) def subscribe_to_requests(self): - _log.debug( f"Subscribing to requests on agents queue: \n {t.field_agent_request_queue(self.downstream_message_bus.id, self.agent_id)} \ \n {t.field_agent_request_queue(self.upstream_message_bus.id, self.agent_id)}" ) self.downstream_message_bus.subscribe( - t.field_agent_request_queue(self.downstream_message_bus.id, self.agent_id), - self.on_request_from_downstream) + t.field_agent_request_queue(self.downstream_message_bus.id, self.agent_id), self.on_request_from_downstream + ) self.upstream_message_bus.subscribe( - t.field_agent_request_queue(self.upstream_message_bus.id, self.agent_id), - self.on_request_from_uptream) + t.field_agent_request_queue(self.upstream_message_bus.id, self.agent_id), self.on_request_from_uptream + ) - def on_measurement(self, headers: Dict, message) -> None: + def on_measurement(self, headers: dict[str, Any], message) -> None: raise NotImplementedError(f"{self.__class__.__name__} must be overriden in child class") def on_simulation_output(self, headers, message): self.on_measurement(headers=headers, message=message) - def on_upstream_message(self, headers: Dict, message) -> None: + def on_upstream_message(self, headers: dict[str, Any], message) -> None: raise NotImplementedError(f"{self.__class__.__name__} must be overriden in child class") - def on_downstream_message(self, headers: Dict, message) -> None: + def on_downstream_message(self, headers: dict[str, Any], message) -> None: raise NotImplementedError(f"{self.__class__.__name__} must be overriden in child class") - def on_request_from_uptream(self, headers: Dict, message): + def on_request_from_uptream(self, headers: dict[str, Any], message): self.on_request(self.upstream_message_bus, headers, message) - def on_request_from_downstream(self, headers: Dict, message): + def on_request_from_downstream(self, headers: dict[str, Any], message): self.on_request(self.downstream_message_bus, headers, message) - def on_request(self, message_bus, headers: Dict, message): + def on_request(self, message_bus, headers: dict[str, Any], message): raise NotImplementedError(f"{self.__class__.__name__} 
must be overriden in child class") def get_registration_details(self): - details = AgentRegistrationDetails(str(self.agent_id), self.app_id, self.description, - self.upstream_message_bus.id, - self.downstream_message_bus.id) + details = AgentRegistrationDetails( + str(self.agent_id), + self.app_id, + self.description, + self.upstream_message_bus.id, + self.downstream_message_bus.id, + ) return dataclasses.asdict(details) def publish_downstream(self, message): - self.downstream_message_bus.send(t.field_message_bus_topic(self.downstream_message_bus.id), - message) + self.downstream_message_bus.send(t.field_message_bus_topic(self.downstream_message_bus.id), message) def publish_upstream(self, message): - self.upstream_message_bus.send(t.field_message_bus_topic(self.upstream_message_bus.id), - message) + self.upstream_message_bus.send(t.field_message_bus_topic(self.upstream_message_bus.id), message) def send_control_command(self, differenceBuilder: DifferenceBuilder): - if self.simulation_id is not None: + if self.simulation_id is not None and self.downstream_message_bus is not None: LocalContext.send_control_command(self.downstream_message_bus, differenceBuilder) - ''' + """ TODO This block needs to be tested with device interface else: self.downstream_message_bus.send(devie_interface_topic, differenceBuilder) - ''' + """ -''' TODO this has not been implemented yet, so we are commented them out for now. +""" TODO this has not been implemented yet, so we are commented them out for now. # not all agent would use this def on_control(self, control): device_id = control.get('device') command = control.get('command') self.control_device(device_id, command) -''' +""" -class SubstationAgent(DistributedAgent): - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - substation_dict=None, - simulation_id=None): - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - substation_dict, simulation_id) +class SubstationAgent(DistributedAgent): + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + substation_dict=None, + simulation_id=None, + ): + super().__init__( + upstream_message_bus_def, downstream_message_bus_def, agent_config, substation_dict, simulation_id + ) self.substation_area = None self.downstream_message_bus_def = downstream_message_bus_def self._connect() - if self.agent_area_dict is not None: + if self.agent_area_dict is not None and cim is not None: substation = cim.Substation(mRID=self.downstream_message_bus_def.id) - self.substation_area = DistributedArea(connection=self.connection, - container=substation, - distributed=True) + self.substation_area = DistributedArea(connection=self.connection, container=substation, distributed=True) self.substation_area.build_from_topo_message(topology_dict=self.agent_area_dict) -class FeederAgent(DistributedAgent): - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - feeder_dict=None, - simulation_id=None): - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - feeder_dict, simulation_id) +class FeederAgent(DistributedAgent): + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + feeder_dict=None, + 
simulation_id=None, + ): + super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, feeder_dict, simulation_id) self.feeder_area = None self.downstream_message_bus_def = downstream_message_bus_def self._connect() - if self.agent_area_dict is not None: + if self.agent_area_dict is not None and cim is not None: feeder = cim.FeederArea(mRID=self.downstream_message_bus_def.id) - self.feeder_area = DistributedArea(connection=self.connection, - container=feeder, - distributed=True) + self.feeder_area = DistributedArea(connection=self.connection, container=feeder, distributed=True) self.feeder_area.build_from_topo_message(topology_dict=self.agent_area_dict) class SwitchAreaAgent(DistributedAgent): - - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - switch_area_dict=None, - simulation_id=None): - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - switch_area_dict, simulation_id) + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + switch_area_dict=None, + simulation_id=None, + ): + super().__init__( + upstream_message_bus_def, downstream_message_bus_def, agent_config, switch_area_dict, simulation_id + ) self.switch_area = None self.downstream_message_bus_def = downstream_message_bus_def self._connect() - if self.agent_area_dict is not None: + if self.agent_area_dict is not None and cim is not None: container = cim.SwitchArea(mRID=self.downstream_message_bus_def.id) - self.switch_area = DistributedArea(container=container, - connection=self.connection, - distributed=True) + self.switch_area = DistributedArea(container=container, connection=self.connection, distributed=True) self.switch_area.build_from_topo_message(topology_dict=self.agent_area_dict) class SecondaryAreaAgent(DistributedAgent): - - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - secondary_area_dict=None, - simulation_id=None): - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - secondary_area_dict, simulation_id) + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + secondary_area_dict=None, + simulation_id=None, + ): + super().__init__( + upstream_message_bus_def, downstream_message_bus_def, agent_config, secondary_area_dict, simulation_id + ) self.secondary_area = None self.downstream_message_bus_def = downstream_message_bus_def self._connect() - if self.agent_area_dict is not None: - if len(self.agent_area_dict['AddressableEquipment']) == 0: + if self.agent_area_dict is not None and cim is not None: + if len(self.agent_area_dict["AddressableEquipment"]) == 0: + downstream_id = self.downstream_message_bus.id if self.downstream_message_bus else "unknown" _log.warning( - f"No addressable equipment in the secondary area with down stream message bus id: {self.downstream_message_bus.id}." + f"No addressable equipment in the secondary area with down stream message bus id: {downstream_id}." 
) container = cim.SecondaryArea(mRID=self.downstream_message_bus_def.id) - self.secondary_area = DistributedArea(container=container, - connection=self.connection, - distributed=True) + self.secondary_area = DistributedArea(container=container, connection=self.connection, distributed=True) self.secondary_area.build_from_topo_message(topology_dict=self.agent_area_dict) @@ -483,12 +504,9 @@ class CoordinatingAgent: upstream, peer , downstream and broadcast """ - def __init__(self, - feeder_id, - system_message_bus_def: MessageBusDefinition, - simulation_id=None): + def __init__(self, feeder_id, system_message_bus_def: MessageBusDefinition, simulation_id=None): self.feeder_id = feeder_id - self.distributed_agents = [] + self.distributed_agents: list[DistributedAgent] = [] self.system_message_bus = MessageBusFactory.create(system_message_bus_def) self.system_message_bus.connect() @@ -505,7 +523,7 @@ def __init__(self, # self.subscribe_to_feeder_bus() -''' def spawn_distributed_agent(self, distributed_agent: DistributedAgent): +""" def spawn_distributed_agent(self, distributed_agent: DistributedAgent): distributed_agent.connect() self.distributed_agents.append(distributed_agent) @@ -522,4 +540,4 @@ def publish_to_distribution_bus_agent(self,agent_id, message): def control_device(self, device_id, command): device_topic = self.devices.get(device_id) - self.secondary_message_bus.publish(device_topic, command)''' + self.secondary_message_bus.publish(device_topic, command)""" diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context.py index fe74afd..fd626b0 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context.py @@ -1,8 +1,6 @@ from gridappsd import DifferenceBuilder from gridappsd_field_bus.field_interface.interfaces import FieldMessageBus -import dataclasses import gridappsd.topics as t -import json import logging from gridappsd.goss import TimeoutError import time @@ -11,21 +9,15 @@ class LocalContext: - @classmethod - def get_context_by_feeder(cls, - downstream_message_bus: FieldMessageBus, - feeder_mrid, - area_id=None): - - request = {'request_type': 'get_context', 'modelId': feeder_mrid, 'areaId': area_id} + def get_context_by_feeder(cls, downstream_message_bus: FieldMessageBus, feeder_mrid, area_id=None): + request = {"request_type": "get_context", "modelId": feeder_mrid, "areaId": area_id} response = None while response is None: try: - response = downstream_message_bus.get_response(t.context_request_queue( - downstream_message_bus.id), - request, - timeout=10) + response = downstream_message_bus.get_response( + t.context_request_queue(downstream_message_bus.id), request, timeout=10 + ) except TimeoutError: _log.warning("Context request timed out. 
Trying again...") time.sleep(5) @@ -38,14 +30,13 @@ def get_context_by_message_bus(cls, downstream_message_bus: FieldMessageBus): return agents/devices based on downstream message bus as input """ - request = {'request_type': 'get_context', 'areaId': downstream_message_bus.id} + request = {"request_type": "get_context", "areaId": downstream_message_bus.id} response = None while response is None: try: - response = downstream_message_bus.get_response(t.context_request_queue( - downstream_message_bus.id), - request, - timeout=10) + response = downstream_message_bus.get_response( + t.context_request_queue(downstream_message_bus.id), request, timeout=10 + ) except TimeoutError: _log.warning("Context request timed out. Trying again...") time.sleep(5) @@ -53,13 +44,12 @@ def get_context_by_message_bus(cls, downstream_message_bus: FieldMessageBus): return response @classmethod - def register_agent(cls, downstream_message_bus: FieldMessageBus, - upstream_message_bus: FieldMessageBus, agent): + def register_agent(cls, downstream_message_bus: FieldMessageBus, upstream_message_bus: FieldMessageBus, agent): """ Sends the newly created distributed agent's info to OT bus """ - request = {'request_type': 'register_agent', 'agent': agent.get_registration_details()} + request = {"request_type": "register_agent", "agent": agent.get_registration_details()} downstream_message_bus.send(t.context_request_queue(downstream_message_bus.id), request) upstream_message_bus.send(t.context_request_queue(upstream_message_bus.id), request) @@ -69,21 +59,18 @@ def get_agents(cls, downstream_message_bus: FieldMessageBus): Sends the newly created distributed agent's info to OT bus """ - request = {'request_type': 'get_agents'} + request = {"request_type": "get_agents"} return downstream_message_bus.get_response( - t.context_request_queue(downstream_message_bus.id), request) + t.context_request_queue(downstream_message_bus.id), request, timeout=5 + ) @classmethod - def send_control_command(cls, downstream_message_bus: FieldMessageBus, - difference_builder: DifferenceBuilder): + def send_control_command(cls, downstream_message_bus: FieldMessageBus, difference_builder: DifferenceBuilder): """ Sends the control command to device """ - request = { - 'request_type': 'control_command', - 'difference_builder': difference_builder.get_message() - } + request = {"request_type": "control_command", "difference_builder": difference_builder.get_message()} downstream_message_bus.send(t.context_request_queue(downstream_message_bus.id), request) diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/__init__.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/__init__.py index 3df1f04..e107f58 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/__init__.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/__init__.py @@ -1,9 +1,16 @@ from typing import List -from gridappsd_field_bus.field_interface.context_managers.context_manager_agents import (SubstationAreaContextManager, - FeederAreaContextManager, - SwitchAreaContextManager, - SecondaryAreaContextManager) +from gridappsd_field_bus.field_interface.context_managers.context_manager_agents import ( + SubstationAreaContextManager, + FeederAreaContextManager, + SwitchAreaContextManager, + SecondaryAreaContextManager, +) -__all__: List[str] = ["SubstationAreaContextManager","FeederAreaContextManager","SwitchAreaContextManager","SecondaryAreaContextManager"] 
+__all__: List[str] = [ + "SubstationAreaContextManager", + "FeederAreaContextManager", + "SwitchAreaContextManager", + "SecondaryAreaContextManager", +] diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/centralized_context_managers.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/centralized_context_managers.py index 5012d4d..3a33dae 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/centralized_context_managers.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/centralized_context_managers.py @@ -7,81 +7,83 @@ import gridappsd.topics as t import gridappsd_field_bus.field_interface.agents.agents as agents_mod from gridappsd_field_bus.field_interface.context_managers.utils import REQUEST_FIELD, get_message_bus_definition -from gridappsd_field_bus.field_interface.context_managers.context_manager_agents import FeederAreaContextManager, SwitchAreaContextManager, SecondaryAreaContextManager +from gridappsd_field_bus.field_interface.context_managers.context_manager_agents import ( + FeederAreaContextManager, + SwitchAreaContextManager, + SecondaryAreaContextManager, +) cim_profile = CIM_PROFILE.CIMHUB_2023.value agents_mod.set_cim_profile(cim_profile=cim_profile, iec61970_301=7) cim = agents_mod.cim logging.basicConfig(level=logging.DEBUG) -logging.getLogger('goss').setLevel(logging.ERROR) -logging.getLogger('stomp.py').setLevel(logging.ERROR) +logging.getLogger("goss").setLevel(logging.ERROR) +logging.getLogger("stomp.py").setLevel(logging.ERROR) _log = logging.getLogger(__name__) -def _main(): +def _main(): time.sleep(10) parser = argparse.ArgumentParser() parser.add_argument( "--simulation_id", help="Simulation id to use for communicating with simulated devices on the message bus. 
\ If simulation_id is not provided then Context Manager assumes to run on deployed field with real devices.", - required=False) + required=False, + ) opts = parser.parse_args() simulation_id = opts.simulation_id agent_config = { - "app_id": - "context_manager", - "description": - "This agent provides topological context information like neighboring agents and devices to other distributed agents" + "app_id": "context_manager", + "description": "This agent provides topological context information like neighboring agents and devices to other distributed agents", } gapps = GridAPPSD() response = gapps.get_response(t.PLATFORM_STATUS, {"isField": True}) - field_model_mrid = response['fieldModelMrid'] + field_model_mrid = response["fieldModelMrid"] is_field_initialized = False while not is_field_initialized: response = gapps.get_response(REQUEST_FIELD, {"request_type": "is_initilized"}) print(response) - is_field_initialized = response['data']['initialized'] + is_field_initialized = response["data"]["initialized"] time.sleep(1) - - - system_message_bus_def = get_message_bus_definition(field_model_mrid) feeder_message_bus_def = get_message_bus_definition(field_model_mrid) - #TODO: create access control for agents for different layers - feeder_agent = FeederAreaContextManager(system_message_bus_def, - feeder_message_bus_def, - agent_config, - simulation_id=simulation_id) - - #print(feeder_agent.agent_area_dict) - for switch_area in feeder_agent.agent_area_dict['SwitchAreas']: - switch_area_message_bus_def = get_message_bus_definition(str(switch_area['@id'])) - print("Creating switch area agent " + str(switch_area['@id'])) - switch_area_agent = SwitchAreaContextManager(feeder_message_bus_def, - switch_area_message_bus_def, - agent_config, - simulation_id=simulation_id, - switch_area_dict=switch_area) + # TODO: create access control for agents for different layers + feeder_agent = FeederAreaContextManager( + system_message_bus_def, feeder_message_bus_def, agent_config, simulation_id=simulation_id + ) + + # print(feeder_agent.agent_area_dict) + for switch_area in feeder_agent.agent_area_dict["SwitchAreas"]: + switch_area_message_bus_def = get_message_bus_definition(str(switch_area["@id"])) + print("Creating switch area agent " + str(switch_area["@id"])) + SwitchAreaContextManager( + feeder_message_bus_def, + switch_area_message_bus_def, + agent_config, + simulation_id=simulation_id, + switch_area_dict=switch_area, + ) # create secondary area distributed agents - for secondary_area in switch_area['SecondaryAreas']: - secondary_area_message_bus_def = get_message_bus_definition( - str(secondary_area['@id'])) - print("Creating secondary area agent " + str(secondary_area['@id'])) - secondary_area_agent = SecondaryAreaContextManager(switch_area_message_bus_def, - secondary_area_message_bus_def, - agent_config, - simulation_id=simulation_id, - secondary_area_dict=secondary_area) + for secondary_area in switch_area["SecondaryAreas"]: + secondary_area_message_bus_def = get_message_bus_definition(str(secondary_area["@id"])) + print("Creating secondary area agent " + str(secondary_area["@id"])) + SecondaryAreaContextManager( + switch_area_message_bus_def, + secondary_area_message_bus_def, + agent_config, + simulation_id=simulation_id, + secondary_area_dict=secondary_area, + ) while True: try: diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/context_manager_agents.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/context_manager_agents.py 
index 8039008..f50d95f 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/context_manager_agents.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/context_manager_agents.py @@ -1,232 +1,230 @@ +from __future__ import annotations import logging import time -from typing import Dict +from typing import Any import gridappsd.topics as t from gridappsd import GridAPPSD -from gridappsd_field_bus.field_interface.agents import (SubstationAgent, FeederAgent, SecondaryAreaAgent, SwitchAreaAgent) +from gridappsd_field_bus.field_interface.agents import SubstationAgent, FeederAgent, SecondaryAreaAgent, SwitchAreaAgent from gridappsd_field_bus.field_interface.interfaces import MessageBusDefinition from gridappsd_field_bus.field_interface.context_managers.utils import REQUEST_FIELD logging.basicConfig(level=logging.DEBUG) -logging.getLogger('goss').setLevel(logging.ERROR) -logging.getLogger('stomp.py').setLevel(logging.ERROR) +logging.getLogger("goss").setLevel(logging.ERROR) +logging.getLogger("stomp.py").setLevel(logging.ERROR) _log = logging.getLogger(__name__) -class SubstationAreaContextManager(SubstationAgent): - - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - substation_dict: Dict = None, - simulation_id: str = None): +class SubstationAreaContextManager(SubstationAgent): + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + substation_dict: dict[str, Any] | None = None, + simulation_id: str | None = None, + ): self.ot_connection = GridAPPSD() if substation_dict is None: - request = {'request_type': 'get_context', 'areaId': downstream_message_bus_def.id} + request = {"request_type": "get_context", "areaId": downstream_message_bus_def.id} substation_dict = None while substation_dict is None: self.ot_connection.get_logger().debug(f"Requesting topology for {self.__class__}") response = self.ot_connection.get_response(REQUEST_FIELD, request, timeout=10) - if 'DistributionArea' in response: - substation_dict = response['DistributionArea']['Substation']['@id'] + if "DistributionArea" in response: + substation_dict = response["DistributionArea"]["Substation"]["@id"] self.ot_connection.get_logger().debug("Topology received at Substation Area Context Manager") else: time.sleep(5) - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - substation_dict, simulation_id) + super().__init__( + upstream_message_bus_def, downstream_message_bus_def, agent_config, substation_dict, simulation_id + ) - #Override agent_id to a static value - self.agent_id = downstream_message_bus_def.id + '.context_manager' + # Override agent_id to a static value + self.agent_id = downstream_message_bus_def.id + ".context_manager" - self.context = {'data':substation_dict} + self.context: dict[str, Any] = {"data": substation_dict} - self.registered_agents = {} + self.registered_agents: dict[str, Any] = {} self.registered_agents[self.agent_id] = self.get_registration_details() - self.neighbouring_agents = {} - self.upstream_agents = {} - self.downstream_agents = {} + self.neighbouring_agents: dict[str, Any] = {} + self.upstream_agents: dict[str, Any] = {} + self.downstream_agents: dict[str, Any] = {} self.ot_connection.get_logger().info("Substation Area Context Manager Created") class FeederAreaContextManager(FeederAgent): - - def 
__init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - feeder_dict: Dict = None, - simulation_id: str = None): - + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + feeder_dict: dict[str, Any] | None = None, + simulation_id: str | None = None, + ): self.ot_connection = GridAPPSD() if feeder_dict is None: - request = {'request_type': 'get_context', 'areaId': downstream_message_bus_def.id} + request = {"request_type": "get_context", "areaId": downstream_message_bus_def.id} feeder_dict = None while feeder_dict is None: self.ot_connection.get_logger().debug(f"Requesting topology for {self.__class__}") response = self.ot_connection.get_response(REQUEST_FIELD, request, timeout=10) - if 'data' in response: - feeder_dict = response['data'] + if "data" in response: + feeder_dict = response["data"] self.ot_connection.get_logger().debug("Topology received at Feeder Area Context Manager") else: time.sleep(5) - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - feeder_dict, simulation_id) + super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, feeder_dict, simulation_id) - #Override agent_id to a static value - self.agent_id = downstream_message_bus_def.id + '.context_manager' + # Override agent_id to a static value + self.agent_id = downstream_message_bus_def.id + ".context_manager" - self.context = {'data':feeder_dict} + self.context: dict[str, Any] = {"data": feeder_dict} - self.registered_agents = {} + self.registered_agents: dict[str, Any] = {} self.registered_agents[self.agent_id] = self.get_registration_details() - self.neighbouring_agents = {} - self.upstream_agents = {} - self.downstream_agents = {} + self.neighbouring_agents: dict[str, Any] = {} + self.upstream_agents: dict[str, Any] = {} + self.downstream_agents: dict[str, Any] = {} self.ot_connection.get_logger().info("Feeder Area Context Manager Created") - def on_request(self, message_bus, headers: Dict, message): - + def on_request(self, message_bus, headers: dict[str, Any], message): _log.debug(f"Received request: {message}") - if message['request_type'] == 'get_context': - reply_to = headers['reply-to'] + if message["request_type"] == "get_context": + reply_to = headers["reply-to"] if self.context is None: self.context = self.ot_connection.get_response(REQUEST_FIELD, message) message_bus.send(reply_to, self.context) - elif message['request_type'] == 'register_agent': + elif message["request_type"] == "register_agent": self.ot_connection.send(t.REGISTER_AGENT_QUEUE, message) - self.registered_agents[message['agent']['agent_id']] = message['agent'] + self.registered_agents[message["agent"]["agent_id"]] = message["agent"] - elif message['request_type'] == 'get_agents': - reply_to = headers['reply-to'] + elif message["request_type"] == "get_agents": + reply_to = headers["reply-to"] message_bus.send(reply_to, self.registered_agents) - elif message['request_type'] == 'is_initialized': - reply_to = headers['reply-to'] - message = {'initialized': True} + elif message["request_type"] == "is_initialized": + reply_to = headers["reply-to"] + message = {"initialized": True} message_bus.send(reply_to, message) - elif message['request_type'] == 'control_command': - simulation_id = message['input']['simulation_id'] + elif message["request_type"] == "control_command": + simulation_id = 
message["input"]["simulation_id"] self.ot_connection.send(t.simulation_input_topic(simulation_id), message) class SwitchAreaContextManager(SwitchAreaAgent): - - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - switch_area_dict: Dict = None, - simulation_id: str = None): - + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + switch_area_dict: dict[str, Any] | None = None, + simulation_id: str | None = None, + ): self.ot_connection = GridAPPSD() if switch_area_dict is None: - request = {'request_type': 'get_context', 'areaId': downstream_message_bus_def.id} - switch_area_dict = self.ot_connection.get_response(REQUEST_FIELD, request, - timeout=10)['data'] + request = {"request_type": "get_context", "areaId": downstream_message_bus_def.id} + switch_area_dict = self.ot_connection.get_response(REQUEST_FIELD, request, timeout=10)["data"] - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - switch_area_dict, simulation_id) + super().__init__( + upstream_message_bus_def, downstream_message_bus_def, agent_config, switch_area_dict, simulation_id + ) - #Override agent_id to a static value - self.agent_id = downstream_message_bus_def.id + '.context_manager' + # Override agent_id to a static value + self.agent_id = downstream_message_bus_def.id + ".context_manager" - self.context = {'data':switch_area_dict} + self.context: dict[str, Any] = {"data": switch_area_dict} - self.registered_agents = {} + self.registered_agents: dict[str, Any] = {} self.registered_agents[self.agent_id] = self.get_registration_details() - self.ot_connection.get_logger().info("Switch Area "+self.agent_id+" Context Manager Created") - - def on_request(self, message_bus, headers: Dict, message): + self.ot_connection.get_logger().info("Switch Area " + self.agent_id + " Context Manager Created") + def on_request(self, message_bus, headers: dict[str, Any], message): _log.debug(f"Received request: {message}") - if message['request_type'] == 'get_context': - #TODO: check for initialization - reply_to = headers['reply-to'] + if message["request_type"] == "get_context": + # TODO: check for initialization + reply_to = headers["reply-to"] if self.context is None: self.context = self.ot_connection.get_response(REQUEST_FIELD, message) message_bus.send(reply_to, self.context) - elif message['request_type'] == 'register_agent': - #TODO: check for initialization + elif message["request_type"] == "register_agent": + # TODO: check for initialization self.ot_connection.send(t.REGISTER_AGENT_QUEUE, message) - self.registered_agents[message['agent']['agent_id']] = message['agent'] + self.registered_agents[message["agent"]["agent_id"]] = message["agent"] - elif message['request_type'] == 'get_agents': - #TODO: check for initialization - reply_to = headers['reply-to'] + elif message["request_type"] == "get_agents": + # TODO: check for initialization + reply_to = headers["reply-to"] message_bus.send(reply_to, self.registered_agents) - elif message['request_type'] == 'is_initialized': - reply_to = headers['reply-to'] - message = {'initialized': True} + elif message["request_type"] == "is_initialized": + reply_to = headers["reply-to"] + message = {"initialized": True} message_bus.send(reply_to, message) - elif message['request_type'] == 'control_command': - simulation_id = message['input']['simulation_id'] + elif 
message["request_type"] == "control_command": + simulation_id = message["input"]["simulation_id"] self.ot_connection.send(t.simulation_input_topic(simulation_id), message) class SecondaryAreaContextManager(SecondaryAreaAgent): - - def __init__(self, - upstream_message_bus_def: MessageBusDefinition, - downstream_message_bus_def: MessageBusDefinition, - agent_config: Dict, - secondary_area_dict: Dict = None, - simulation_id: str = None): - + def __init__( + self, + upstream_message_bus_def: MessageBusDefinition, + downstream_message_bus_def: MessageBusDefinition, + agent_config: dict[str, Any], + secondary_area_dict: dict[str, Any] | None = None, + simulation_id: str | None = None, + ): self.ot_connection = GridAPPSD() if secondary_area_dict is None: - request = {'request_type': 'get_context', 'areaId': downstream_message_bus_def.id} - secondary_area_dict = self.ot_connection.get_response(REQUEST_FIELD, - request, - timeout=10)['data'] + request = {"request_type": "get_context", "areaId": downstream_message_bus_def.id} + secondary_area_dict = self.ot_connection.get_response(REQUEST_FIELD, request, timeout=10)["data"] - super().__init__(upstream_message_bus_def, downstream_message_bus_def, agent_config, - secondary_area_dict, simulation_id) + super().__init__( + upstream_message_bus_def, downstream_message_bus_def, agent_config, secondary_area_dict, simulation_id + ) - #Override agent_id to a static value - self.agent_id = downstream_message_bus_def.id + '.context_manager' + # Override agent_id to a static value + self.agent_id = downstream_message_bus_def.id + ".context_manager" - self.context = {'data':secondary_area_dict} + self.context: dict[str, Any] = {"data": secondary_area_dict} - self.registered_agents = {} + self.registered_agents: dict[str, Any] = {} self.registered_agents[self.agent_id] = self.get_registration_details() - self.ot_connection.get_logger().info("Secondary Area "+self.agent_id+" Context Manager Created") - - def on_request(self, message_bus, headers: Dict, message): + self.ot_connection.get_logger().info("Secondary Area " + self.agent_id + " Context Manager Created") + def on_request(self, message_bus, headers: dict[str, Any], message): _log.debug(f"Received request: {message}") _log.debug(f"Received request: {headers}") - if message['request_type'] == 'get_context': - reply_to = headers['reply-to'] + if message["request_type"] == "get_context": + reply_to = headers["reply-to"] if self.context is None: self.context = self.ot_connection.get_response(REQUEST_FIELD, message) message_bus.send(reply_to, self.context) - elif message['request_type'] == 'register_agent': + elif message["request_type"] == "register_agent": self.ot_connection.send(t.REGISTER_AGENT_QUEUE, message) - self.registered_agents[message['agent']['agent_id']] = message['agent'] + self.registered_agents[message["agent"]["agent_id"]] = message["agent"] - elif message['request_type'] == 'get_agents': - reply_to = headers['reply-to'] + elif message["request_type"] == "get_agents": + reply_to = headers["reply-to"] message_bus.send(reply_to, self.registered_agents) - elif message['request_type'] == 'is_initialized': - reply_to = headers['reply-to'] - message = {'initialized': True} + elif message["request_type"] == "is_initialized": + reply_to = headers["reply-to"] + message = {"initialized": True} message_bus.send(reply_to, message) - elif message['request_type'] == 'control_command': - simulation_id = message['input']['simulation_id'] + elif message["request_type"] == "control_command": + simulation_id = 
message["input"]["simulation_id"] self.ot_connection.send(t.simulation_input_topic(simulation_id), message) diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/substation.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/substation.py index 36300b4..d9f5cb5 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/substation.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/substation.py @@ -3,7 +3,6 @@ import logging import time -from cimgraph.data_profile import CIM_PROFILE import gridappsd_field_bus.field_interface.agents.agents as agents_mod from gridappsd_field_bus.field_interface.interfaces import MessageBusDefinition @@ -15,61 +14,63 @@ cim = agents_mod.cim logging.basicConfig(level=logging.DEBUG) -logging.getLogger('goss').setLevel(logging.ERROR) -logging.getLogger('stomp.py').setLevel(logging.ERROR) +logging.getLogger("goss").setLevel(logging.ERROR) +logging.getLogger("stomp.py").setLevel(logging.ERROR) _log = logging.getLogger(__name__) -def _main(): +def _main(): parser = argparse.ArgumentParser() parser.add_argument( "--simulation_id", help="Simulation id to use for communicating with simulated devices on the message bus. \ If simulation_id is not provided then Context Manager assumes to run on deployed field with real devices.", - required=False) + required=False, + ) parser.add_argument( "-u", "--upstream_system_message_bus", help="Yaml file to connect with upstream system(OT) message bus.", - required=True) + required=True, + ) parser.add_argument( "-d", "--downstream_substation_message_bus", help="Yaml file to connect with downstream substation area message bus.", type=str, - required=True) + required=True, + ) parser.add_argument( "--substation_dict", help="JSON file containing substation topology dictionary. 
If this file is not provided then disctionary is requested by Field Bus Manager using upstream message bus.", type=str, - required=False) + required=False, + ) opts = parser.parse_args() simulation_id = opts.simulation_id agent_config = { - "app_id": - "context_manager", - "description": - "This agent provides topological context information like neighboring agents and devices to other distributed agents" + "app_id": "context_manager", + "description": "This agent provides topological context information like neighboring agents and devices to other distributed agents", } - system_message_bus_def = MessageBusDefinition.load(opts.upstream_system_message_bus) substation_message_bus_def = MessageBusDefinition.load(opts.downstream_substation_message_bus) - with open(opts.substation_dict,encoding="utf-8") as f: + with open(opts.substation_dict, encoding="utf-8") as f: substation_dict = json.load(f)["DistributionArea"]["Substations"][0] - - substation_agent = SubstationAreaContextManager(system_message_bus_def, - substation_message_bus_def, - agent_config, - substation_dict = substation_dict, - simulation_id=simulation_id) + substation_agent = SubstationAreaContextManager( + system_message_bus_def, + substation_message_bus_def, + agent_config, + substation_dict=substation_dict, + simulation_id=simulation_id, + ) print(substation_agent.context) diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/utils.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/utils.py index af8bfdb..18df36e 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/utils.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/context_managers/utils.py @@ -3,21 +3,23 @@ import gridappsd.topics as t from gridappsd_field_bus.field_interface.interfaces import MessageBusDefinition, ConnectionType -#FieldBusManager's request topics. To be used only by context manager user role only. +# FieldBusManager's request topics. To be used only by context manager user role only. 
REQUEST_FIELD = ".".join((t.PROCESS_PREFIX, "request.field")) -def get_message_bus_definition(area_id: str) -> MessageBusDefinition: - connection_args = { - "GRIDAPPSD_ADDRESS": os.environ.get('GRIDAPPSD_ADDRESS', "tcp://gridappsd:61613"), - "GRIDAPPSD_USER": os.environ.get('GRIDAPPSD_USER'), - "GRIDAPPSD_PASSWORD": os.environ.get('GRIDAPPSD_PASSWORD'), - "GRIDAPPSD_APPLICATION_ID": os.environ.get('GRIDAPPSD_APPLICATION_ID') +def get_message_bus_definition(area_id: str) -> MessageBusDefinition: + connection_args: dict[str, str | int | bool | None] = { + "GRIDAPPSD_ADDRESS": os.environ.get("GRIDAPPSD_ADDRESS", "tcp://gridappsd:61613"), + "GRIDAPPSD_USER": os.environ.get("GRIDAPPSD_USER"), + "GRIDAPPSD_PASSWORD": os.environ.get("GRIDAPPSD_PASSWORD"), + "GRIDAPPSD_APPLICATION_ID": os.environ.get("GRIDAPPSD_APPLICATION_ID"), } - bus = MessageBusDefinition(id=area_id, - is_ot_bus=True, - connection_type=ConnectionType.CONNECTION_TYPE_GRIDAPPSD, - connection_args=connection_args) + bus = MessageBusDefinition( + id=area_id, + is_ot_bus=True, + connection_type=ConnectionType.CONNECTION_TYPE_GRIDAPPSD, + connection_args=connection_args, + ) return bus diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/field_proxy_forwarder.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/field_proxy_forwarder.py index 7203f58..b575efc 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/field_proxy_forwarder.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/field_proxy_forwarder.py @@ -1,27 +1,37 @@ import stomp import json import time -from typing import Callable, Dict from gridappsd import GridAPPSD from gridappsd import topics -from cimgraph.databases import GridappsdConnection, BlazegraphConnection -from cimgraph.models import BusBranchModel, FeederModel +try: + from importlib.metadata import version as _pkg_version + + _STOMP_V8 = int(_pkg_version("stomp-py").split(".")[0]) >= 8 +except Exception: + _STOMP_V8 = False + +from cimgraph.databases import BlazegraphConnection +from cimgraph.models import BusBranchModel import os -import cimgraph.utils as utils import cimgraph.data_profile.cimhub_ufls as cim REQUEST_FIELD = ".".join((topics.PROCESS_PREFIX, "request.field")) -class FieldListener: +class FieldListener: def __init__(self, ot_connection: GridAPPSD, proxy_connection: stomp.Connection): self.ot_connection = ot_connection self.proxy_connection = proxy_connection - def on_message(self, headers, message): + def on_message(self, *args): "Receives messages coming from Proxy bus (e.g. ARTEMIS) and forwards to OT bus" + if _STOMP_V8: + frame = args[0] + headers, message = frame.headers, frame.body + else: + headers, message = args[0], args[1] try: print(f"Received message at Proxy. 
destination: {headers['destination']}, message: {headers}") @@ -35,87 +45,80 @@ def on_message(self, headers, message): request_data = json.loads(message) request_type = request_data.get("request_type") if request_type == "get_context": - response = self.ot_connection.get_response(headers["destination"],message) - self.proxy_connection.send(headers["reply-to"],response) + response = self.ot_connection.get_response(headers["destination"], message) + self.proxy_connection.send(headers["reply-to"], response) elif request_type == "start_publishing": - response = self.ot_connection.get_response(headers["destination"],message) - self.proxy_connection.send(headers["reply-to"],json.dumps(response)) - + response = self.ot_connection.get_response(headers["destination"], message) + self.proxy_connection.send(headers["reply-to"], json.dumps(response)) + else: print(f"Unrecognized message received by Proxy: {message}") except Exception as e: print(f"Error processing message: {e}") + class FieldProxyForwarder: """ FieldProxyForwarder acts as a bridge between field bus and OT bus when direct connection is not possible. """ - def __init__(self, connection_url: str, username: str, password: str, mrid :str): - - #Connect to OT + def __init__(self, connection_url: str, username: str, password: str, mrid: str): + # Connect to OT self.ot_connection = GridAPPSD() - #Connect to proxy + # Connect to proxy self.broker_url = connection_url self.username = username self.password = password - self.proxy_connection = stomp.Connection([(self.broker_url.split(":")[0], int(self.broker_url.split(":")[1]))],keepalive=True, heartbeats=(10000,10000)) - self.proxy_connection.set_listener('', FieldListener(self.ot_connection, self.proxy_connection)) + self.proxy_connection = stomp.Connection( + [(self.broker_url.split(":")[0], int(self.broker_url.split(":")[1]))], + keepalive=True, + heartbeats=(10000, 10000), + ) + self.proxy_connection.set_listener("", FieldListener(self.ot_connection, self.proxy_connection)) self.proxy_connection.connect(self.username, self.password, wait=True) - - print('Connected to Proxy') + print("Connected to Proxy") + # Subscribe to messages from field + self.proxy_connection.subscribe(destination=topics.BASE_FIELD_TOPIC + ".*", id=1, ack="auto") + self.proxy_connection.subscribe(destination="goss.gridappsd.process.request.*", id=2, ack="auto") - #Subscribe to messages from field - self.proxy_connection.subscribe(destination=topics.BASE_FIELD_TOPIC+'.*', id=1, ack="auto") - self.proxy_connection.subscribe(destination='goss.gridappsd.process.request.*', id=2, ack="auto") - - #Subscribe to messages on OT bus + # Subscribe to messages on OT bus self.ot_connection.subscribe(topics.field_input_topic(), self.on_message_from_ot) - - - os.environ['CIMG_CIM_PROFILE'] = 'cimhub_ufls' - os.environ['CIMG_URL'] = 'http://localhost:8889/bigdata/namespace/kb/sparql' - os.environ['CIMG_DATABASE'] = 'powergridmodel' - os.environ['CIMG_NAMESPACE'] = 'http://iec.ch/TC57/CIM100#' - os.environ['CIMG_IEC61970_301'] = '8' - os.environ['CIMG_USE_UNITS'] = 'False' + os.environ["CIMG_CIM_PROFILE"] = "cimhub_ufls" + os.environ["CIMG_URL"] = "http://localhost:8889/bigdata/namespace/kb/sparql" + os.environ["CIMG_DATABASE"] = "powergridmodel" + os.environ["CIMG_NAMESPACE"] = "http://iec.ch/TC57/CIM100#" + os.environ["CIMG_IEC61970_301"] = "8" + os.environ["CIMG_USE_UNITS"] = "False" self.database = BlazegraphConnection() distribution_area = cim.DistributionArea(mRID=mrid) - self.network = BusBranchModel( - 
connection=self.database, - container=distribution_area, - distributed=False) + self.network = BusBranchModel(connection=self.database, container=distribution_area, distributed=False) self.network.get_all_edges(cim.DistributionArea) self.network.get_all_edges(cim.Substation) - for substation in self.network.graph.get(cim.Substation,{}).values(): - print(f'Subscribing to Substation: /topic/goss.gridappsd.field.{substation.mRID}') - self.ot_connection.subscribe('/topic/goss.gridappsd.field.'+substation.mRID, self.on_message_from_ot) - - - - #self.ot_connection.subscribe(topics.BASE_FIELD_TOPIC, self.on_message_from_ot) + for substation in self.network.graph.get(cim.Substation, {}).values(): + mrid = substation.mRID # type: ignore[attr-defined] + print(f"Subscribing to Substation: /topic/goss.gridappsd.field.{mrid}") + self.ot_connection.subscribe("/topic/goss.gridappsd.field." + mrid, self.on_message_from_ot) + # self.ot_connection.subscribe(topics.BASE_FIELD_TOPIC, self.on_message_from_ot) def on_message_from_ot(self, headers, message): - "Receives messages coming from OT bus (GridAPPS-D) and forwards to Proxy bus" try: print(f"Received message from OT: {message}") if headers["destination"] == topics.field_input_topic(): - self.proxy_connection.send(topics.field_input_topic(),json.dumps(message)) - - elif 'goss.gridappsd.field' in headers["destination"]: + self.proxy_connection.send(topics.field_input_topic(), json.dumps(message)) - self.proxy_connection.send(headers["destination"],json.dumps(message)) + elif "goss.gridappsd.field" in headers["destination"]: + self.proxy_connection.send(headers["destination"], json.dumps(message)) else: print(f"Unrecognized message received by OT: {message}") @@ -125,6 +128,7 @@ def on_message_from_ot(self, headers, message): if __name__ == "__main__": import argparse + parser = argparse.ArgumentParser(prog="TestForwarder") parser.add_argument("username") parser.add_argument("passwd") diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/gridappsd_field_bus.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/gridappsd_field_bus.py index 5a0fd61..ec05638 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/gridappsd_field_bus.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/gridappsd_field_bus.py @@ -5,7 +5,6 @@ class GridAPPSDMessageBus(FieldMessageBus): - def __init__(self, definition: MessageBusDefinition): super().__init__(definition) self._id = definition.id @@ -18,13 +17,13 @@ def __init__(self, definition: MessageBusDefinition): self.gridappsd_obj = None def query_devices(self) -> dict: - pass + return {} def is_connected(self) -> bool: """ Is this object connected to the message bus """ - pass + return self.gridappsd_obj is not None and self.gridappsd_obj.connected def connect(self): """ diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/interfaces.py b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/interfaces.py index 3dedc5c..1044f9a 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/interfaces.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/field_interface/interfaces.py @@ -8,7 +8,7 @@ import logging from os import PathLike from pathlib import Path -from typing import Dict, List, Optional, Union, Any +from typing import Any, Iterator import yaml @@ -37,7 +37,6 @@ class ConnectionType(Enum): class ProtocolTransformer(ABC): - @staticmethod @abstractmethod def to_cim(data) -> str: @@ -53,7 +52,7 @@ def to_cim(data) -> 
str: @staticmethod @abstractmethod - def to_protocol(cim_data: str, from_format: Optional[str] = None): + def to_protocol(cim_data: str, from_format: str | None = None): """ Change passed cim data into a protocol complient data stream and return it. @@ -70,6 +69,7 @@ class MessageBusDefinition: A `MessageBusDefinition` class is used to define how to connect to the message bus. """ + """ A global unique string representing a specific message bus. """ @@ -81,7 +81,7 @@ class MessageBusDefinition: """ connection_args allows dynamic key/value paired strings to be added to allow connections. """ - connection_args: Dict[str, str | int] + connection_args: dict[str, str | int | bool | None] """ Determines whether or not this message bus has the role of ot bus. @@ -98,7 +98,7 @@ def __validate_loader__(json_obj: dict[str, Any]) -> bool: return True @staticmethod - def load_from_json(json_obj: dict[str, str | dict]) -> MessageBusDefinition: + def load_from_json(json_obj: dict[str, Any]) -> MessageBusDefinition: MessageBusDefinition.__validate_loader__(json_obj) mb_def = MessageBusDefinition(**json_obj) @@ -112,15 +112,14 @@ def load(config_file) -> MessageBusDefinition: """ Load a single message bus definition from a YAML file. """ - config = yaml.load(open(config_file), Loader=yaml.FullLoader)['connections'] - + with open(config_file) as f: + config = yaml.load(f, Loader=yaml.FullLoader)["connections"] return MessageBusDefinition.load_from_json(config) class FieldMessageBus: - def __init__(self, config: MessageBusDefinition): - self._devices = dict() + self._devices: dict[str, DeviceFieldInterface] = {} self._is_ot_bus = config.is_ot_bus self._id = config.id @@ -202,25 +201,25 @@ def create(config: MessageBusDefinition) -> FieldMessageBus: """ Create a message bus based upon the configuration passed. 
""" - try: - module_name, class_name = config.connection_type.value.rsplit('.', 1) - except AttributeError: - module_name, class_name = config.connection_type.rsplit('.', 1) + connection_type = config.connection_type + if isinstance(connection_type, ConnectionType): + type_value = connection_type.value + else: + type_value = str(connection_type) + module_name, class_name = type_value.rsplit(".", 1) module = importlib.import_module(module_name) bus_class = getattr(module, class_name) - return bus_class(config) - + return bus_class(config) # type: ignore[no-any-return] class MessageBusDefinitions: - def __init__( self, - config: Optional[Union[dict, str]] = None, - yamlfile: Optional[Union[str, PathLike]] = None, + config: dict[str, Any] | str | None = None, + yamlfile: str | PathLike[str] | None = None, ): - self._buses = dict() + self._buses: dict[str, MessageBusDefinition] = {} if config is None and yamlfile is None: raise ValueError("Must have either config specified") @@ -228,46 +227,43 @@ def __init__( if config and yamlfile: raise ValueError("Must have at least one of config or yamlfile specified.") + parsed_config: dict[str, Any] if yamlfile: if not Path(yamlfile).exists(): raise ValueError(f"Invalid path for yamlfile {yamlfile}") with open(yamlfile) as fp: - config = yaml.safe_load(fp) + parsed_config = yaml.safe_load(fp) elif isinstance(config, str): - config = yaml.load(config) + parsed_config = yaml.safe_load(config) + else: + parsed_config = config # type: ignore[assignment] - if config.get("connections"): - for con in config.get("connections"): + if parsed_config.get("connections"): + for con in parsed_config.get("connections", []): obj = MessageBusDefinition.load(con) if self._buses.get(obj.id): raise ValueError(f"Duplicate messagebus id specified for {obj.id}") self._buses[obj.id] = obj else: - obj = MessageBusDefinition.load(config) + obj = MessageBusDefinition.load(parsed_config) self._buses[obj.id] = obj - self._iterator = None + self._iterator: Iterator[str] | None = None - def get(self, id: str) -> Union[MessageBusDefinition, None]: + def get(self, id: str) -> MessageBusDefinition | None: return self._buses.get(id) - def __iter__(self): - if self._iterator is None: - self._iterator = iter(self._buses) + def __iter__(self) -> Iterator[str]: + self._iterator = iter(self._buses) return self._iterator - def __next__(self) -> MessageBusDefinition: - try: - definition = next(self._iterator) - except StopIteration: - self._iterator = None - return None - else: - return definition + def __next__(self) -> str: + if self._iterator is None: + raise StopIteration + return next(self._iterator) class DeviceFieldInterface: - def __init__( self, id: str, diff --git a/gridappsd-field-bus-lib/gridappsd_field_bus/forwarder.py b/gridappsd-field-bus-lib/gridappsd_field_bus/forwarder.py index 79b0c75..66ed250 100644 --- a/gridappsd-field-bus-lib/gridappsd_field_bus/forwarder.py +++ b/gridappsd-field-bus-lib/gridappsd_field_bus/forwarder.py @@ -2,39 +2,45 @@ from dotenv import load_dotenv import click -import yaml import os import urllib -from gridappsd_field_bus import MessageBusDefinition from gridappsd_field_bus.field_interface.field_proxy_forwarder import FieldProxyForwarder @click.command() -@click.option('--username', - default=lambda: os.getenv("GRIDAPPSD_USER"), - metavar='USERNAME', - type=str, - help='Username for the connection.', - show_default="from environment variable GRIDAPPSD_USER") -@click.option('--password', - metavar='PASSWORD', - type=str, - default=lambda: 
os.getenv("GRIDAPPSD_PASSWORD"), - help='Password for the connection.', - show_default="from environment variable GRIDAPPSD_PASSWORD") -@click.option('--connection_url', - default=lambda: os.getenv("GRIDAPPSD_ADDRESS"), - type=str, - metavar='URL', - show_default="from environment variable GRIDAPPSD_ADDRESS", - help='Connection URL.') +@click.option( + "--username", + default=lambda: os.getenv("GRIDAPPSD_USER"), + metavar="USERNAME", + type=str, + help="Username for the connection.", + show_default="from environment variable GRIDAPPSD_USER", +) +@click.option( + "--password", + metavar="PASSWORD", + type=str, + default=lambda: os.getenv("GRIDAPPSD_PASSWORD"), + help="Password for the connection.", + show_default="from environment variable GRIDAPPSD_PASSWORD", +) +@click.option( + "--connection_url", + default=lambda: os.getenv("GRIDAPPSD_ADDRESS"), + type=str, + metavar="URL", + show_default="from environment variable GRIDAPPSD_ADDRESS", + help="Connection URL.", +) def start_forwarder(username, password, connection_url): """Start the field proxy forwarder with either a YAML configuration or cmd-line arguments.""" required = [username, password, connection_url] if not all(required): - click.echo("Username, password, and connection URL must be provided either through environment variables or command-line arguments.") + click.echo( + "Username, password, and connection URL must be provided either through environment variables or command-line arguments." + ) click.Abort() parsed = urllib.parse.urlparse(connection_url) @@ -45,12 +51,11 @@ def start_forwarder(username, password, connection_url): # Use command-line arguments click.echo(f"Using command line arguments: {username}, {password}, {connection_url}") - proxy_forwarder = FieldProxyForwarder(username, password, connection_url) + FieldProxyForwarder(username, password, connection_url, None) time.sleep(0.1) - -if __name__ == '__main__': +if __name__ == "__main__": load_dotenv() - start_forwarder() \ No newline at end of file + start_forwarder() diff --git a/gridappsd-field-bus-lib/pyproject.toml b/gridappsd-field-bus-lib/pyproject.toml index 0b6dd67..fc8f87f 100644 --- a/gridappsd-field-bus-lib/pyproject.toml +++ b/gridappsd-field-bus-lib/pyproject.toml @@ -1,64 +1,33 @@ -[tool.poetry] +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] name = "gridappsd-field-bus" -version = "2025.6.0" +version = "2026.1.1b2" description = "GridAPPS-D Field Bus Implementation" +readme = "README.md" +license = "BSD-3-Clause" +requires-python = ">=3.10,<4.0" authors = [ - "C. Allwardt <3979063+craig8@users.noreply.github.com>", - "P. 
Sharma =2025.3.1a1", + "cim-graph>=0.4.3a6", + "click>=8.1", ] +[project.urls] +Repository = "https://github.com/GRIDAPPSD/gridappsd-python" +Homepage = "https://gridappsd.readthedocs.io" -[tool.poetry.dependencies] -python = ">=3.10,<4.0" -gridappsd-python = ">=2025.3.1a1" -cim-graph = ">=0.2.2a4" -click = "^8.1.8" - -[tool.poetry.scripts] -# Add things in the form -# myscript = 'my_package:main' -start-field-bus-forwarder = 'gridappsd_field_bus.forwarder:start_forwarder' -context_manager = 'gridappsd_field_bus.field_interface.context_managers.centralized_context_managers:_main' - -[tool.poetry.group.dev.dependencies] -pytest = "^8.3.4" -pytest-html = "^3.1.1" -mock = "^4.0.3" -docker = "^4.4.4" -yapf = "^0.32.0" -pre-commit = "^4.1.0" - -[build-system] -requires = ["poetry-core>=1.2.0"] -build-backend = "poetry.core.masonry.api" - -[tool.yapfignore] -ignore_patterns = [ - ".venv/**", - ".pytest_cache/**", - "dist/**", - "docs/**" -] - -[tool.yapf] -based_on_style = "pep8" -spaces_before_comment = 4 -column_limit = 99 -split_before_logical_operator = true +[project.scripts] +start-field-bus-forwarder = "gridappsd_field_bus.forwarder:start_forwarder" +context_manager = "gridappsd_field_bus.field_interface.context_managers.centralized_context_managers:_main" -[tool.poetry.requires-plugins] -poetry-plugin-export = ">=1.8" +[tool.hatch.build.targets.wheel] +packages = ["gridappsd_field_bus"] diff --git a/gridappsd-python-lib/gridappsd/__init__.py b/gridappsd-python-lib/gridappsd/__init__.py index 3a36d69..7b9debf 100644 --- a/gridappsd-python-lib/gridappsd/__init__.py +++ b/gridappsd-python-lib/gridappsd/__init__.py @@ -37,20 +37,21 @@ # PACIFIC NORTHWEST NATIONAL LABORATORY operated by BATTELLE for the # UNITED STATES DEPARTMENT OF ENERGY under Contract DE-AC05-76RL01830 # ------------------------------------------------------------------------------- -""" GridAPPSD Python Connection Library +"""GridAPPSD Python Connection Library The :mod:`gridappsd` contains a connection class :class:`gridappsd.GridAPPSD` for connecting with the main GridAPPSD executable. 
""" + import pathlib import typing StrPath = typing.Union[str, pathlib.Path] -from gridappsd.goss import GOSS -from gridappsd.utils import ProcessStatusEnum -from gridappsd.gridappsd import GridAPPSD -from gridappsd.difference_builder import DifferenceBuilder -from gridappsd.app_registration import ApplicationController -import gridappsd.json_extension as json +from gridappsd.goss import GOSS as GOSS +from gridappsd.utils import ProcessStatusEnum as ProcessStatusEnum +from gridappsd.gridappsd import GridAPPSD as GridAPPSD +from gridappsd.difference_builder import DifferenceBuilder as DifferenceBuilder +from gridappsd.app_registration import ApplicationController as ApplicationController +from gridappsd import json_extension as json diff --git a/gridappsd-python-lib/gridappsd/__main__.py b/gridappsd-python-lib/gridappsd/__main__.py index fe35629..5bda4f7 100644 --- a/gridappsd-python-lib/gridappsd/__main__.py +++ b/gridappsd-python-lib/gridappsd/__main__.py @@ -48,28 +48,30 @@ from pathlib import Path from gridappsd import GridAPPSD +from gridappsd.simulation import Simulation assert sys.version_info >= (3, 10), "Minimum version is python 3.10" -logging.basicConfig(stream=sys.stdout, - level=logging.INFO, - format="'%(asctime)s: %(name)-20s - %(levelname)-6s - %(message)s") +logging.basicConfig( + stream=sys.stdout, level=logging.INFO, format="'%(asctime)s: %(name)-20s - %(levelname)-6s - %(message)s" +) -logging.getLogger('stomp.py').setLevel(logging.WARNING) +logging.getLogger("stomp.py").setLevel(logging.WARNING) _log = logging.getLogger("gridappsd.__main__") -if __name__ == '__main__': - +if __name__ == "__main__": parser = ArgumentParser() group = parser.add_mutually_exclusive_group(required=True) - group.add_argument("-s", - "--run-simulation", - type=argparse.FileType('r'), - help="Start running a simulation from a passed simulation file.") - group.add_argument("--env", required=False, type=str, - default=".env", - help="Load environment variables from a .env file.") + group.add_argument( + "-s", + "--run-simulation", + type=argparse.FileType("r"), + help="Start running a simulation from a passed simulation file.", + ) + group.add_argument( + "--env", required=False, type=str, default=".env", help="Load environment variables from a .env file." + ) opts = parser.parse_args() if opts.run_simulation: @@ -91,8 +93,6 @@ def next_timestep(simulation, timestep): gappsd = GridAPPSD() run_args = yaml.safe_load(opts.run_simulation) - # if wanting to use the above next_timestep function use this - # instead of the one below. 
- # simulation = gappsd.run_simulation(run_args, next_timestep) - simulation = gappsd.run_simulation(run_args) - simulation.simulation_main_loop() + # Create and start the simulation + simulation = Simulation(gappsd, run_args) + simulation.start_simulation() diff --git a/gridappsd-python-lib/gridappsd/app_registration.py b/gridappsd-python-lib/gridappsd/app_registration.py index baf4562..dfdf6d8 100644 --- a/gridappsd-python-lib/gridappsd/app_registration.py +++ b/gridappsd-python-lib/gridappsd/app_registration.py @@ -1,17 +1,12 @@ -#import json +# import json import logging import os -try: - from queue import Queue -except ImportError: - from Queue import Queue +from queue import Queue import time -import select import subprocess import threading import shlex import sys -import os from .gridappsd import GridAPPSD from .topics import REQUEST_REGISTER_APP @@ -21,12 +16,11 @@ # determine OS type posix = False -if os.name == 'posix': +if os.name == "posix": posix = True class Job(threading.Thread): - def __init__(self, args, out=sys.stdout, err=sys.stderr): threading.Thread.__init__(self) _log.debug("Creating job") @@ -41,31 +35,30 @@ def shutdown(self): def run(self): try: self.running = True - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'RUNNING' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "RUNNING" p = subprocess.Popen(args=self._args, shell=False, stdout=self._out, stderr=self._err) # Loop while process is executing while p.poll() is None and self.running: - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'RUNNING' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "RUNNING" time.sleep(1) except Exception as e: - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'ERROR' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "ERROR" _log.error(repr(e)) else: - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STOPPED' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STOPPED" class ApplicationController(object): - def __init__(self, config, gridappsd=None, heatbeat_period=10): if not isinstance(config, dict): raise ValueError("Config should be dictionary") if not isinstance(gridappsd, GridAPPSD): raise ValueError("Invalid gridappsd instance passed.") - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STOPPED' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STOPPED" self._configDict = config.copy() self._validate_config() self._gapd = gridappsd @@ -87,15 +80,16 @@ def __init__(self, config, gridappsd=None, heatbeat_period=10): self._end_callback = None self._print_queue = Queue() self._heartbeat_thread = None - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STOPPED' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STOPPED" - if "type" not in self._configDict or self._configDict['type'] != 'REMOTE': - _log.warning('Setting type to REMOTE you can remove this error by putting ' - '"type": "REMOTE" in the app config file.') - self._configDict['type'] = 'REMOTE' + if "type" not in self._configDict or self._configDict["type"] != "REMOTE": + _log.warning( + 'Setting type to REMOTE you can remove this error by putting "type": "REMOTE" in the app config file.' 
+ ) + self._configDict["type"] = "REMOTE" def _validate_config(self): - required = ['id', 'execution_path'] + required = ["id", "execution_path"] missing = [x for x in required if x not in self._configDict] if missing: @@ -114,30 +108,30 @@ def register_app(self, end_callback): self._gapd.get_logger().debug("Started App Registration") response = self._gapd.get_response(REQUEST_REGISTER_APP, self._configDict, 60) - if 'message' in response: + if "message" in response: _log.error("An error regisering the application occured") - _log.error(response.get('message')) - raise ValueError(response.get('message')) - self._application_id = response.get('applicationId') - self._heartbeat_topic = response.get('heartbeatTopic') - self._heartbeat_period = response.get('heartbeatPeriod', 10) - self._start_control_topic = response.get('startControlTopic') - self._stop_control_topic = response.get('stopControlTopic') + _log.error(response.get("message")) + raise ValueError(response.get("message")) + self._application_id = response.get("applicationId") + self._heartbeat_topic = response.get("heartbeatTopic") + self._heartbeat_period = response.get("heartbeatPeriod", 10) + self._start_control_topic = response.get("startControlTopic") + self._stop_control_topic = response.get("stopControlTopic") os.environ["GRIDAPPSD_APPLICATION_ID"] = self._application_id - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STOPPED' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STOPPED" self._gapd.subscribe(self._stop_control_topic, self.__handle_stop) self._gapd.subscribe(self._start_control_topic, self.__handle_start) self._end_callback = end_callback # TODO assuming good response start the heartbeat - self._heartbeat_thread = threading.Thread(target=self.__start_heartbeat, - args=[self.__heartbeat_error]) + self._heartbeat_thread = threading.Thread(target=self.__start_heartbeat, args=[self.__heartbeat_error]) self._heartbeat_thread.daemon = True self._heartbeat_thread.start() - self._gapd.get_logger().debug("Heartbeat registereed for application {}".format( - utils.get_gridappsd_application_id())) + self._gapd.get_logger().debug( + "Heartbeat registereed for application {}".format(utils.get_gridappsd_application_id()) + ) def __heartbeat_error(self): self._heartbeat_thread = None @@ -151,8 +145,7 @@ def __start_heartbeat(self, error_callback): # print("Seanding heartbeat {} {}".format(self._heartbeat_topic, self._application_id)) # print("Heartbeat period: {}".format(self._heartbeat_period)) self._gapd.send(self._heartbeat_topic, self._application_id) - time.sleep(self._heartbeat_period - - ((time.time() - starttime) % self._heartbeat_period)) + time.sleep(self._heartbeat_period - ((time.time() - starttime) % self._heartbeat_period)) except: error_callback() @@ -167,15 +160,15 @@ def __handle_start(self, headers, message): obj = json.loads(message) else: obj = message - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STARTING' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STARTING" self._gapd.get_logger().debug("Handling Start: {}\ndict:\n{}".format(headers, obj)) - if 'command' not in obj: + if "command" not in obj: # Send log to gridappsd _log.error("Invalid message sent on start app.") else: _log.debug("CWD IS: {}".format(os.getcwd())) - args = shlex.split(obj['command']) + args = shlex.split(obj["command"]) job = Job(args) job.daemon = True job.start() @@ -183,12 +176,12 @@ def __handle_start(self, headers, message): def __handle_stop(self, headers, message): print("Handling Stop: {} {}".format(headers, message)) - 
os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STOPPING' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STOPPING" if self._thread: self._thread.join() if self._end_callback is not None: self._end_callback() - os.environ['GRIDAPPSD_APPLICATION_STATUS'] = 'STOPPED' + os.environ["GRIDAPPSD_APPLICATION_STATUS"] = "STOPPED" def shutdown(self): self._shutting_down = True diff --git a/gridappsd-python-lib/gridappsd/difference_builder.py b/gridappsd-python-lib/gridappsd/difference_builder.py index d7f585e..bf52302 100644 --- a/gridappsd-python-lib/gridappsd/difference_builder.py +++ b/gridappsd-python-lib/gridappsd/difference_builder.py @@ -44,43 +44,46 @@ class DifferenceBuilder(object): - """ Automates the building of forward and reverse cim differences - - """ + """Automates the building of forward and reverse cim differences""" def __init__(self, simulation_id: str | int | None = None): - self._simulation_id = simulation_id self._forward: list[dict] = [] self._reverse: list[dict] = [] def add_difference(self, object_id, attribute, forward_value, reverse_value): - """ Add forward and reverse unit for an object attribute. + """Add forward and reverse unit for an object attribute. - All of the parameters must be serializable for sending the GOSS message bus. - """ + All of the parameters must be serializable for sending the GOSS message bus. + """ forward = dict(object=object_id, attribute=attribute, value=forward_value) reverse = dict(object=object_id, attribute=attribute, value=reverse_value) self._forward.append(forward) self._reverse.append(reverse) def clear(self): - """ Clear the forward and reverse differences """ + """Clear the forward and reverse differences""" self._forward = [] self._reverse = [] def get_message(self, epoch=None): - """ Get the message to send to the GOSS message bus + """Get the message to send to the GOSS message bus :param epoch: The epoch time to use for the message timestamp. If None, the current time (GMT) is used. 
""" if epoch is None: epoch = calendar.timegm(time.gmtime()) - msg = dict(command="update", - input=dict(message=dict(timestamp=epoch, - difference_mrid=str(uuid4()), - reverse_differences=self._reverse, - forward_differences=self._forward))) + msg = dict( + command="update", + input=dict( + message=dict( + timestamp=epoch, + difference_mrid=str(uuid4()), + reverse_differences=self._reverse, + forward_differences=self._forward, + ) + ), + ) if self._simulation_id is not None: - msg['input']['simulation_id'] = self._simulation_id + msg["input"]["simulation_id"] = self._simulation_id return msg.copy() diff --git a/gridappsd-python-lib/gridappsd/field_interface/__init__.py b/gridappsd-python-lib/gridappsd/field_interface/__init__.py index a223e59..3ddd563 100644 --- a/gridappsd-python-lib/gridappsd/field_interface/__init__.py +++ b/gridappsd-python-lib/gridappsd/field_interface/__init__.py @@ -1,32 +1,52 @@ import logging import sys +import warnings _log = logging.getLogger(__name__) -try: - import warnings +# These will be populated if gridappsd-field-bus is installed +context = None +context_managers = None +agents = None +field_proxy_forwarder = None +gridappsd_field_bus = None +interfaces = None +try: import gridappsd_field_bus.field_interface.context as _context import gridappsd_field_bus.field_interface.context_managers as _context_managers import gridappsd_field_bus.field_interface as _field_interface import gridappsd_field_bus.field_interface.agents as _agents - import gridappsd_field_bus.field_interface.field_proxy_forwarder as _field_proxy_forwarder import gridappsd_field_bus.field_interface.gridappsd_field_bus as _gridappsd_field_bus import gridappsd_field_bus.field_interface.interfaces as _interfaces - sys.modules['gridappsd.field_interface'] = _field_interface - sys.modules['gridappsd.field_interface.interfaces'] = _interfaces - sys.modules['gridappsd.field_interface.context_managers'] = _context_managers - sys.modules['gridappsd_.context_managers'] = _context_managers - sys.modules['gridappsd.field_interface.agents'] = _agents - sys.modules['gridappsd.field_interface.field_proxy_forwarder'] = _field_proxy_forwarder - sys.modules['gridappsd.field_interface.gridappsd_field_bus'] = _gridappsd_field_bus - - - - - - warnings.warn(message="gridappsd.field_interface is deprecated and will be removed in a future release. 
Use gridappsd_field_bus.field_interface instead.", - category=DeprecationWarning) -except ImportError: - _log.error("Could not import field_interface install gridappsd-field-bus to get those functions.") + # Expose as module attributes for `from gridappsd.field_interface import X` syntax + context = _context + context_managers = _context_managers + agents = _agents + gridappsd_field_bus = _gridappsd_field_bus + interfaces = _interfaces + + # Also register in sys.modules for backwards compatibility + sys.modules["gridappsd.field_interface.interfaces"] = _interfaces + sys.modules["gridappsd.field_interface.context"] = _context + sys.modules["gridappsd.field_interface.context_managers"] = _context_managers + sys.modules["gridappsd.field_interface.agents"] = _agents + sys.modules["gridappsd.field_interface.gridappsd_field_bus"] = _gridappsd_field_bus + + # field_proxy_forwarder has optional dependencies that may not be available + try: + import gridappsd_field_bus.field_interface.field_proxy_forwarder as _field_proxy_forwarder + + field_proxy_forwarder = _field_proxy_forwarder + sys.modules["gridappsd.field_interface.field_proxy_forwarder"] = _field_proxy_forwarder + except ImportError: + _log.debug("field_proxy_forwarder not available (missing optional dependencies)") + + warnings.warn( + message="gridappsd.field_interface is deprecated and will be removed in a future release. " + "Use gridappsd_field_bus.field_interface instead.", + category=DeprecationWarning, + ) +except ImportError as e: + _log.error(f"Could not import field_interface: {e}. Install gridappsd-field-bus to get those functions.") diff --git a/gridappsd-python-lib/gridappsd/goss.py b/gridappsd-python-lib/gridappsd/goss.py index 869f24c..503b046 100644 --- a/gridappsd-python-lib/gridappsd/goss.py +++ b/gridappsd-python-lib/gridappsd/goss.py @@ -42,9 +42,11 @@ @author: Craig Allwardt """ + import base64 import inspect -#import json + +# import json import logging import os import random @@ -55,6 +57,7 @@ from logging import Logger from queue import Queue +import stomp as _stomp_module from stomp import Connection12 as Connection from stomp.exception import NotConnectedException from time import sleep @@ -63,6 +66,20 @@ _log: Logger = logging.getLogger(inspect.getmodulename(__file__)) +# stomp.py 8.x changed listener callbacks from (headers, body) to (frame) +_stomp_major = ( + int(getattr(_stomp_module, "__version__", (0,))[0]) + if isinstance(getattr(_stomp_module, "__version__", None), tuple) + else 0 +) +try: + from importlib.metadata import version as _pkg_version + + _stomp_major = int(_pkg_version("stomp-py").split(".")[0]) +except Exception: + pass +_STOMP_V8 = _stomp_major >= 8 + class GRIDAPPSD_ENV_ENUM(Enum): GRIDAPPSD_USER = "GRIDAPPSD_USER" @@ -78,22 +95,22 @@ class TimeoutError(Exception): class GOSS(object): - """ Base class providing connections to a GOSS instance via stomp protocol - """ - - def __init__(self, - username=None, - password=None, - stomp_address='localhost', - stomp_port='61613', - attempt_connection=True, - override_threading=None, - stomp_log_level=logging.WARNING, - goss_log_level=logging.INFO, - use_auth_token=True): - - logging.getLogger('stomp.py').setLevel(stomp_log_level) - logging.getLogger('goss').setLevel(goss_log_level) + """Base class providing connections to a GOSS instance via stomp protocol""" + + def __init__( + self, + username=None, + password=None, + stomp_address="localhost", + stomp_port="61613", + attempt_connection=True, + override_threading=None, + 
stomp_log_level=logging.WARNING, + goss_log_level=logging.INFO, + use_auth_token=True, + ): + logging.getLogger("stomp.py").setLevel(stomp_log_level) + logging.getLogger("goss").setLevel(goss_log_level) self.__user__ = username self.__pass__ = password @@ -154,12 +171,9 @@ def send(self, topic, message): if isinstance(message, list) or isinstance(message, dict): message = json.dumps(message) _log.debug("Sending topic: {} body: {}".format(topic, message)) - self._conn.send(body=message, - destination=topic, - headers={ - 'GOSS_HAS_SUBJECT': True, - 'GOSS_SUBJECT': self.__token - }) + self._conn.send( + body=message, destination=topic, headers={"GOSS_HAS_SUBJECT": True, "GOSS_SUBJECT": self.__token} + ) def get_response(self, topic, message, timeout=5): id = datetime.now().strftime("%Y%m%d%h%M%S%f")[:-3] @@ -168,8 +182,8 @@ def get_response(self, topic, message, timeout=5): if isinstance(message, str): message = json.loads(message) - if 'resultFormat' in message: - self.result_format = message['resultFormat'] + if "resultFormat" in message: + self.result_format = message["resultFormat"] # Change message to string if we have a dictionary. if isinstance(message, dict): @@ -178,17 +192,20 @@ def get_response(self, topic, message, timeout=5): message = json.dumps(message) class ResponseListener(object): - def __init__(self, topic, result_format): self.response = None self._topic = topic self.result_format = result_format - def on_message(self, header, message): - + def on_message(self, *args): + if _STOMP_V8: + frame = args[0] + header, message = frame.headers, frame.body + else: + header, message = args[0], args[1] _log.debug("Internal on message is: {} {}".format(header, message)) try: - if self.result_format == 'JSON': + if self.result_format == "JSON": if isinstance(message, dict): self.response = message else: @@ -196,27 +213,25 @@ def on_message(self, header, message): else: self.response = message except ValueError: - self.response = dict(error="Invalid json returned", - header=header, - message=message) + self.response = dict(error="Invalid json returned", header=header, message=message) - def on_error(self, headers, message): + def on_error(self, *args): + if _STOMP_V8: + frame = args[0] + headers, message = frame.headers, frame.body + else: + headers, message = args[0], args[1] _log.error("ERR: {}".format(headers)) _log.error("OUR ERROR: {}".format(message)) - def on_disconnect(self, header, message): - _log.debug("Disconnected") - listener = ResponseListener(reply_to, self.result_format) self.subscribe(reply_to, listener) - self._conn.send(body=message, - destination=topic, - headers={ - 'reply-to': reply_to, - 'GOSS_HAS_SUBJECT': True, - 'GOSS_SUBJECT': self.__token - }) + self._conn.send( + body=message, + destination=topic, + headers={"reply-to": reply_to, "GOSS_HAS_SUBJECT": True, "GOSS_SUBJECT": self.__token}, + ) count = 0 while count < timeout: @@ -255,8 +270,8 @@ def subscribe(self, topic, callback): self._make_connection() - if self._conn.get_listener('gridappsd') is None: - self._conn.set_listener('gridappsd', self._router_callback) + if self._conn.get_listener("gridappsd") is None: + self._conn.set_listener("gridappsd", self._router_callback) if callable(callback): self._router_callback.add_callback(topic, callback) @@ -265,13 +280,12 @@ def subscribe(self, topic, callback): # CallbackWrapperListener(callback, conn_id)) else: # Case where the callback is (supposedly) a class. 
- if not hasattr(callback, 'on_message'): + if not hasattr(callback, "on_message"): m = "The given callback must have an 'on_message' method!" raise AttributeError(m) if not callable(callback.on_message): - m = "The given callback's 'on_message' attribute must be " \ - "callable!" + m = "The given callback's 'on_message' attribute must be callable!" raise TypeError(m) # Fix for https://github.com/GRIDAPPSD/GOSS-GridAPPS-D/issues/1072 @@ -284,7 +298,7 @@ def subscribe(self, topic, callback): # CallbackWrapperListener(callback.on_message, conn_id)) _log.debug("Subscribing to {topic}".format(topic=topic)) - self._conn.subscribe(destination=topic, ack='auto', id=conn_id) + self._conn.subscribe(destination=topic, ack="auto", id=conn_id) return conn_id @@ -295,7 +309,6 @@ def _make_connection(self): if self._conn is None or not self._conn.is_connected(): _log.debug("Creating connection") if self.use_auth_token is True and not self.__token: - # get token # get initial connection dt = datetime.now() @@ -309,41 +322,47 @@ def _make_connection(self): # send request to token topic tokenTopic = "/topic/pnnl.goss.token.topic" - tmpConn = Connection([(self.stomp_address, self.stomp_port)], heartbeats=(self._heartbeat, self._heartbeat)) + tmpConn = Connection( + [(self.stomp_address, self.stomp_port)], heartbeats=(self._heartbeat, self._heartbeat) + ) if self._override_thread_fc is not None: tmpConn.transport.override_threading(self._override_thread_fc) tmpConn.connect(self.__user__, self.__pass__, wait=True) - class TokenResponseListener(): - + class TokenResponseListener: def __init__(self): self.__token = None def get_token(self): return self.__token - def on_message(self, header, message): + def on_message(self, *args): + if _STOMP_V8: + frame = args[0] + header, message = frame.headers, frame.body + else: + header, message = args[0], args[1] _log.debug("Internal on message is: {} {}".format(header, message)) self.__token = str(message) - def on_error(self, headers, message): + def on_error(self, *args): + if _STOMP_V8: + frame = args[0] + headers, message = frame.headers, frame.body + else: + headers, message = args[0], args[1] _log.error("ERR: {}".format(headers)) _log.error("OUR ERROR: {}".format(message)) - def on_disconnect(self, header, message): - _log.debug("Disconnected") - # receive token and set token variable # set callback listener = TokenResponseListener() # self.subscribe(replyDest, listener) - tmpConn.subscribe('/queue/' + replyDest, 123) - tmpConn.set_listener('token_resp', listener) - tmpConn.send(body=base64Str, - destination=tokenTopic, - headers={'reply-to': replyDest}) + tmpConn.subscribe("/queue/" + replyDest, 123) + tmpConn.set_listener("token_resp", listener) + tmpConn.send(body=base64Str, destination=tokenTopic, headers={"reply-to": replyDest}) # while token is null or for x iterations iter = 0 while not self.__token and iter < 10: @@ -352,14 +371,16 @@ def on_disconnect(self, header, message): sleep(1) iter += 1 - self._conn = Connection([(self.stomp_address, self.stomp_port)], heartbeats=(self._heartbeat, self._heartbeat)) + self._conn = Connection( + [(self.stomp_address, self.stomp_port)], heartbeats=(self._heartbeat, self._heartbeat) + ) if self._override_thread_fc is not None: self._conn.transport.override_threading(self._override_thread_fc) try: if self.use_auth_token and self.__token is not None: self._conn.connect(self.__token, "", wait=True) else: - self._conn.connect(self.__user__,self.__pass__, wait=True) + self._conn.connect(self.__user__, self.__pass__, 
wait=True) except TypeError as e: _log.error("TypeError: {e}".format(e=e)) except NotConnectedException as e: @@ -369,7 +390,6 @@ def on_disconnect(self, header, message): class CallbackRouter(object): - def __init__(self): self.callbacks = {} self._topics_callback_map = defaultdict(list) @@ -393,7 +413,7 @@ def run_callbacks(self): sleep(0.01) def add_callback(self, topic, callback): - if not topic.startswith('/topic/') and not topic.startswith('/temp-queue/'): + if not topic.startswith("/topic/") and not topic.startswith("/temp-queue/"): topic = "/queue/{topic}".format(topic=topic) if callback in self._topics_callback_map[topic]: raise ValueError("Callbacks can only be used one time per topic") @@ -408,26 +428,31 @@ def remove_callback(self, topic, callback): except ValueError: pass - def on_message(self, headers, message): - destination = headers['destination'] + def on_message(self, *args): + if _STOMP_V8: + frame = args[0] + headers, message = frame.headers, frame.body + else: + headers, message = args[0], args[1] + destination = headers["destination"] # _log.debug("Topic map keys are: {keys}".format(keys=self._topics_callback_map.keys())) if destination in self._topics_callback_map: self._queue_callerback.put((self._topics_callback_map[destination], headers, message)) else: _log.error("INVALID DESTINATION {destination}".format(destination=destination)) - def on_error(self, header, message): - _log.error("Error in callback router") - _log.error(header) - _log.error(message) - - def on_error(self, header, message): + def on_error(self, *args): + if _STOMP_V8: + frame = args[0] + header, message = frame.headers, frame.body + else: + header, message = args[0], args[1] _log.error("Error in callback router") _log.error(header) _log.error(message) def on_heartbeat_timeout(self): _log.error("Heartbeat timeout") - + def on_disconnected(self): _log.info("Disconnected") diff --git a/gridappsd-python-lib/gridappsd/gridappsd.py b/gridappsd-python-lib/gridappsd/gridappsd.py index 11b81d3..5f074ba 100644 --- a/gridappsd-python-lib/gridappsd/gridappsd.py +++ b/gridappsd-python-lib/gridappsd/gridappsd.py @@ -41,7 +41,7 @@ import inspect import logging from datetime import datetime -from logging import DEBUG, INFO, WARNING, FATAL, WARN +from logging import INFO import time from gridappsd.goss import GOSS @@ -63,23 +63,25 @@ class InvalidSimulationIdError(Exception): class GridAPPSD(GOSS): - """ The main :class:`GridAPPSD` interface for connecting to a GridAPPSD instance - """ + """The main :class:`GridAPPSD` interface for connecting to a GridAPPSD instance""" # TODO Get the caller from the traceback/inspect module. 
def __init__(self, simulation_id=None, address=None, **kwargs): - if address is None: address = utils.get_gridappsd_address() - if 'stomp_address' in kwargs and 'stomp_port' in kwargs: - address = (kwargs.pop('stomp_address'), kwargs.pop('stomp_port')) - elif 'stomp_address' in kwargs and not 'stomp_port' in kwargs or \ - 'stomp_port' in kwargs and not 'stomp_address' in kwargs: + if "stomp_address" in kwargs and "stomp_port" in kwargs: + address = (kwargs.pop("stomp_address"), kwargs.pop("stomp_port")) + elif ( + "stomp_address" in kwargs + and "stomp_port" not in kwargs + or "stomp_port" in kwargs + and "stomp_address" not in kwargs + ): raise ValueError("If stomp_address is specified the so should stomp_port") super(GridAPPSD, self).__init__(stomp_address=address[0], stomp_port=address[1], **kwargs) - self._houses = Houses(self) + self._houses: Houses = Houses(self) self._simulation_log_topic = None self._simulation_id = None # Transfer simulation_id from environment if its not passed @@ -122,8 +124,9 @@ def set_application_status(self, status): try: self._process_status = ProcessStatusEnum(status) except ValueError: - self.get_logger().warning("Unsuccessful change of application status." + - f"Valid statuses are {ProcessStatusEnum.__members__}.") + self.get_logger().warning( + "Unsuccessful change of application status." + f"Valid statuses are {ProcessStatusEnum.__members__}." + ) def set_service_status(self, status): """ @@ -133,8 +136,9 @@ def set_service_status(self, status): try: self._process_status = ProcessStatusEnum(status) except ValueError: - self.get_logger().warning("Unsuccessful change of service status." + - f"Valid statuses are {ProcessStatusEnum.__members__}.") + self.get_logger().warning( + "Unsuccessful change of service status." + f"Valid statuses are {ProcessStatusEnum.__members__}." + ) def set_simulation_id(self, simulation_id): if simulation_id is None: @@ -162,7 +166,7 @@ def get_service_status(self): return self._process_status.value def query_object_types(self, model_id=None): - """ Allows the caller to query the different object types. + """Allows the caller to query the different object types. :param model_id: :return: @@ -184,7 +188,7 @@ def query_model_info(self): payload = self._build_query_payload("QUERY_MODEL_INFO") return self.get_response(t.REQUEST_POWERGRID_DATA, payload, timeout=30) - def query_model(self, model_id=None, object_type=None, object_id=None, response_format='JSON'): + def query_model(self, model_id=None, object_type=None, object_id=None, response_format="JSON"): args = {} if model_id is not None: args["modelId"] = model_id @@ -223,25 +227,20 @@ def query_object_dictionary(self, model_id, object_type=None, object_id=None): def query_data(self, query, database_type=POWERGRID_MODEL, timeout=30): request_type = None if database_type == POWERGRID_MODEL: - request_type = 'QUERY' + request_type = "QUERY" else: raise ValueError("Only supported {} currently".format(POWERGRID_MODEL)) payload = self._build_query_payload(request_type, queryString=query) # Do this so we can eventually support other db through this mechanism. 
- request_topic = '.'.join((t.REQUEST_DATA, database_type)) + request_topic = ".".join((t.REQUEST_DATA, database_type)) return self.get_response(request_topic, json.dumps(payload), timeout=timeout) - def get_platform_status(self, - applications=True, - services=True, - appInstances=True, - serviceInstances=True): + def get_platform_status(self, applications=True, services=True, appInstances=True, serviceInstances=True): _log.debug("Retrieving platform status from GridAPPSD") - msg = dict(appInstances=appInstances, - applications=applications, - services=services, - serviceInstances=serviceInstances) + msg = dict( + appInstances=appInstances, applications=applications, services=services, serviceInstances=serviceInstances + ) return self.get_response(t.REQUEST_PLATFORM_STATUS, json.dumps(msg), timeout=30) def send_simulation_status(self, status, message, log_level=INFO): @@ -271,13 +270,13 @@ def build_message_json(self, status, message, log_level): "procesStatus": status, "logMessage": str(message), "logLevel": logging.getLevelName(log_level), - "storeToDb": True + "storeToDb": True, } data = json.dumps(status_message) return data - def _build_query_payload(self, request_type, response_format='JSON', **kwargs): + def _build_query_payload(self, request_type, response_format="JSON", **kwargs): d = dict(requestType=request_type, resultFormat=response_format) d.update(**kwargs) return d diff --git a/gridappsd-python-lib/gridappsd/houses.py b/gridappsd-python-lib/gridappsd/houses.py index 076eef9..1ec5524 100644 --- a/gridappsd-python-lib/gridappsd/houses.py +++ b/gridappsd-python-lib/gridappsd/houses.py @@ -1,22 +1,37 @@ +from __future__ import annotations from collections import namedtuple +from typing import TYPE_CHECKING, Any -house_keys = [ - 'name', 'parent', 'coolingSetpoint', 'coolingSystem', 'floorArea', 'heatingSetpoint', - 'heatingSystem', 'hvacPowerFactor', 'numberOfStories', 'thermalIntegrity', 'id', 'fdrid' -] -House = namedtuple('House', house_keys) +if TYPE_CHECKING: + from gridappsd.gridappsd import GridAPPSD + +House = namedtuple( + "House", + [ + "name", + "parent", + "coolingSetpoint", + "coolingSystem", + "floorArea", + "heatingSetpoint", + "heatingSystem", + "hvacPowerFactor", + "numberOfStories", + "thermalIntegrity", + "id", + "fdrid", + ], +) # class House(HouseBase): # def __dict__ class Houses: - class __SingltonHouses: - - def __init__(self, gappsd: 'GridAPPSD'): + def __init__(self, gappsd: GridAPPSD): self._gappsd = gappsd - self._houses = {} + self._houses: dict[str, dict[str, Any]] = {} def __str__(self): return repr(self) + self._gappsd @@ -27,7 +42,8 @@ def get_houses_for_feeder(self, feeder): return self._houses.get(feeder) def _populate(self, feeder): - query = """# list houses - DistHouse + query = ( + """# list houses - DistHouse PREFIX r: PREFIX c: SELECT ?fdrname ?name ?parent ?coolingSetpoint ?coolingSystem ?floorArea ?heatingSetpoint ?heatingSystem ?hvacPowerFactor ?numberOfStories ?thermalIntegrity ?id ?fdrid @@ -57,17 +73,19 @@ def _populate(self, feeder): ?fdr c:IdentifiedObject.name ?fdrname. ?econ c:Equipment.EquipmentContainer ?fdr. 
} ORDER BY ?fdrname ?name -""" % feeder +""" + % feeder + ) response = self._gappsd.query_data(query) houses = {} - for rec in response['data']['results']['bindings']: + for rec in response["data"]["results"]["bindings"]: create_order = {} name = None - for d in house_keys: - if d == 'name': - name = rec[d]['value'] + for d in House._fields: + if d == "name": + name = rec[d]["value"] try: - create_order[d] = rec[d]['value'] + create_order[d] = rec[d]["value"] except KeyError: create_order[d] = None diff --git a/gridappsd-python-lib/gridappsd/json_extension.py b/gridappsd-python-lib/gridappsd/json_extension.py index 692d0c6..da0978a 100644 --- a/gridappsd-python-lib/gridappsd/json_extension.py +++ b/gridappsd-python-lib/gridappsd/json_extension.py @@ -15,109 +15,110 @@ def jsonDecoderExtension(obj: Any): try: complexInstance = JsonComplex(**obj) rv = complex(complexInstance.real, complexInstance.imag) - except: + except (TypeError, KeyError): rv = obj return rv class JsonEncoderExtension(_json.JSONEncoder): - def default(self, obj: Any) -> Any: rv = None if isinstance(obj, complex): jsonComplexInstance = JsonComplex(real=obj.real, imag=obj.imag) rv = dataclasses.asdict(jsonComplexInstance) - elif dataclasses.is_dataclass(obj): + elif dataclasses.is_dataclass(obj) and not isinstance(obj, type): rv = dataclasses.asdict(obj) else: rv = super().default(obj) return rv -def dump(data: Any, - fo: TextIO, - *, - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - default=None, - sort_keys=False, - **kw): - rv = _json.dump(data, - fo, - skipkeys=skipkeys, - ensure_ascii=ensure_ascii, - check_circular=check_circular, - allow_nan=allow_nan, - cls=JsonEncoderExtension, - indent=indent, - separators=separators, - default=default, - sort_keys=sort_keys, - **kw) +def dump( + data: Any, + fo: TextIO, + *, + skipkeys=False, + ensure_ascii=True, + check_circular=True, + allow_nan=True, + indent=None, + separators=None, + default=None, + sort_keys=False, + **kw, +) -> None: + _json.dump( + data, + fo, + skipkeys=skipkeys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + cls=JsonEncoderExtension, + indent=indent, + separators=separators, + default=default, + sort_keys=sort_keys, + **kw, + ) -def dumps(data: Any, - *, - skipkeys=False, - ensure_ascii=True, - check_circular=True, - allow_nan=True, - indent=None, - separators=None, - default=None, - sort_keys=False, - **kw) -> str: - rv = _json.dumps(data, - skipkeys=skipkeys, - ensure_ascii=ensure_ascii, - check_circular=check_circular, - allow_nan=allow_nan, - cls=JsonEncoderExtension, - indent=indent, - separators=separators, - default=default, - sort_keys=sort_keys, - **kw) +def dumps( + data: Any, + *, + skipkeys=False, + ensure_ascii=True, + check_circular=True, + allow_nan=True, + indent=None, + separators=None, + default=None, + sort_keys=False, + **kw, +) -> str: + rv = _json.dumps( + data, + skipkeys=skipkeys, + ensure_ascii=ensure_ascii, + check_circular=check_circular, + allow_nan=allow_nan, + cls=JsonEncoderExtension, + indent=indent, + separators=separators, + default=default, + sort_keys=sort_keys, + **kw, + ) return rv -def load(fo: TextIO, - *, - cls=None, - parse_float=None, - parse_int=None, - parse_constant=None, - object_pairs_hook=None, - **kw) -> Any: - rv = _json.load(fo, - cls=cls, - object_hook=jsonDecoderExtension, - parse_float=parse_float, - parse_int=parse_int, - parse_constant=parse_constant, - object_pairs_hook=object_pairs_hook, - 
**kw) +def load( + fo: TextIO, *, cls=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw +) -> Any: + rv = _json.load( + fo, + cls=cls, + object_hook=jsonDecoderExtension, + parse_float=parse_float, + parse_int=parse_int, + parse_constant=parse_constant, + object_pairs_hook=object_pairs_hook, + **kw, + ) return rv -def loads(data: str, - *, - cls=None, - parse_float=None, - parse_int=None, - parse_constant=None, - object_pairs_hook=None, - **kw) -> Any: - rv = _json.loads(data, - cls=cls, - object_hook=jsonDecoderExtension, - parse_float=parse_float, - parse_int=parse_int, - parse_constant=parse_constant, - object_pairs_hook=object_pairs_hook, - **kw) +def loads( + data: str, *, cls=None, parse_float=None, parse_int=None, parse_constant=None, object_pairs_hook=None, **kw +) -> Any: + rv = _json.loads( + data, + cls=cls, + object_hook=jsonDecoderExtension, + parse_float=parse_float, + parse_int=parse_int, + parse_constant=parse_constant, + object_pairs_hook=object_pairs_hook, + **kw, + ) return rv diff --git a/gridappsd-python-lib/gridappsd/loghandler.py b/gridappsd-python-lib/gridappsd/loghandler.py index a2997ef..0c34059 100644 --- a/gridappsd-python-lib/gridappsd/loghandler.py +++ b/gridappsd-python-lib/gridappsd/loghandler.py @@ -4,23 +4,23 @@ from . import topics as t _nameToLevel = { - 'FATAL': FATAL, - 'ERROR': ERROR, - 'WARN': WARN, - 'WARNING': WARN, - 'INFO': INFO, - 'DEBUG': DEBUG, - 'NOTSET': NOTSET, + "FATAL": FATAL, + "ERROR": ERROR, + "WARN": WARN, + "WARNING": WARN, + "INFO": INFO, + "DEBUG": DEBUG, + "NOTSET": NOTSET, } _levelToName = { - FATAL: 'FATAL', - ERROR: 'ERROR', - WARNING: 'WARN', - WARN: 'WARN', - INFO: 'INFO', - DEBUG: 'DEBUG', - NOTSET: 'NOTSET', + FATAL: "FATAL", + ERROR: "ERROR", + WARNING: "WARN", + WARN: "WARN", + INFO: "INFO", + DEBUG: "DEBUG", + NOTSET: "NOTSET", } VALID_LOG_LEVELS = set(_nameToLevel.values()) @@ -68,13 +68,14 @@ def log(self, message, level=DEBUG): """ process_identifier = self._gaps.get_application_id() - if not level in VALID_LOG_LEVELS: + if level not in VALID_LOG_LEVELS: raise AttributeError(f"Log level must be one of {[x for x in _levelToName.values()]}") if not process_identifier: raise AttributeError( - f"Must have GRIDAPPSD_APPLICATION_ID or GRIDAPPSD_SERVICE_ID or GRIDAPPSD_PROCESS_ID " - "set in os environments.") + "Must have GRIDAPPSD_APPLICATION_ID or GRIDAPPSD_SERVICE_ID or GRIDAPPSD_PROCESS_ID " + "set in os environments." 
+ ) status = self._gaps.get_application_status() sim_id = self._gaps.get_simulation_id() @@ -89,11 +90,11 @@ def log(self, message, level=DEBUG): "processStatus": str(status), "logMessage": str(message), "logLevel": _levelToName[level], - "storeToDb": True + "storeToDb": True, } - gridappsd_log_level = os.getenv('GRIDAPPSD_LOG_LEVEL') - if gridappsd_log_level == None: + gridappsd_log_level = os.getenv("GRIDAPPSD_LOG_LEVEL") + if gridappsd_log_level is None: gridappsd_log_level = level else: gridappsd_log_level = _nameToLevel[gridappsd_log_level] diff --git a/gridappsd-python-lib/gridappsd/register_app.py b/gridappsd-python-lib/gridappsd/register_app.py index 1411816..18b9838 100644 --- a/gridappsd-python-lib/gridappsd/register_app.py +++ b/gridappsd-python-lib/gridappsd/register_app.py @@ -8,11 +8,13 @@ def main(): loglevel = logging.INFO - logging.basicConfig(stream=sys.stdout, - level=loglevel, - format="%(asctime)s - %(name)s;%(levelname)s|%(message)s", - datefmt="%Y-%m-%d %H:%M:%S") - logging.getLogger('stomp.py').setLevel(logging.ERROR) + logging.basicConfig( + stream=sys.stdout, + level=loglevel, + format="%(asctime)s - %(name)s;%(levelname)s|%(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + ) + logging.getLogger("stomp.py").setLevel(logging.ERROR) _log = logging.getLogger(__name__) problems = utils.validate_gridappsd_uri() @@ -29,23 +31,22 @@ def main(): config = {} with open("/appconfig") as fo: config = json.load(fo) - #config = json.loads(open("/appconfig").read()) + # config = json.loads(open("/appconfig").read()) if "id" not in config: _log.error("Invalid appconfig, must have a unique id set.") sys.exit(1) - os.environ['GRIDAPPSD_APPLICATION_ID'] = config['id'] + os.environ["GRIDAPPSD_APPLICATION_ID"] = config["id"] appreg = None gap = None while True: - try: if gap is None: gap = GridAPPSD() - except ConnectionRefusedError: # Python 3 specific error code + except ConnectionRefusedError: # Python 3 specific error code _log.debug("Retry in 10 seconds") gap = appreg = None time.sleep(10) @@ -70,7 +71,7 @@ def end_app(): try: appreg = ApplicationController(config, gridappsd=gap) appreg.register_app(end_app) - _log.info('Application {} registered.'.format(config['id'])) + _log.info("Application {} registered.".format(config["id"])) except: _log.exception("An unhandled exception occured retrying app") appreg = None diff --git a/gridappsd-python-lib/gridappsd/simulation.py b/gridappsd-python-lib/gridappsd/simulation.py index 8f21815..c1b77bf 100644 --- a/gridappsd-python-lib/gridappsd/simulation.py +++ b/gridappsd-python-lib/gridappsd/simulation.py @@ -1,8 +1,9 @@ +from __future__ import annotations from dataclasses import dataclass, field import time import logging -from typing import Dict, List, Union +from typing import Any, Callable import gridappsd.topics as t from gridappsd import GridAPPSD @@ -13,12 +14,12 @@ class SimulationFailedToStartError(Exception): """Exception raised if a simulation fails to start.""" + pass @dataclass class ConfigBase: - def asjson(self): return json.dumps(self.asdict()) @@ -27,6 +28,13 @@ def asdict(self): for k, v in self.__dict__.items(): if isinstance(v, ConfigBase): built[k] = v.asdict() + elif isinstance(v, list): + built[k] = [] + for item in v: + if isinstance(item, ConfigBase): + built[k].append(item.asdict()) + else: + built[k].append(item) else: built[k] = v return built @@ -34,13 +42,13 @@ def asdict(self): @dataclass class ModelCreationConfig(ConfigBase): - load_scaling_factor: str = field(default = "1") - schedule_name: str = field(default 
= "ieeezipload") - z_fraction: str = field(default = "0") - i_fraction: str = field(default = "1") - p_fraction: str = field(default = "0") - randomize_zipload_fractions: bool = field(default = False) - use_houses: bool = field(default = False) + load_scaling_factor: str = field(default="1") + schedule_name: str = field(default="ieeezipload") + z_fraction: str = field(default="0") + i_fraction: str = field(default="1") + p_fraction: str = field(default="0") + randomize_zipload_fractions: bool = field(default=False) + use_houses: bool = field(default=False) # __default_model_creation_config__ = ModelCreationConfig() @@ -48,16 +56,20 @@ class ModelCreationConfig(ConfigBase): @dataclass class SimulationArgs(ConfigBase): - start_time: str = field(default = "1655321830") - duration: str = field(default = "300") - simulator: str = field(default = "GridLAB-D") - timestep_frequency: str = field(default = "1000") - timestep_increment: str = field(default = "1000") - run_realtime: bool = field(default = True) - pause_after_measurements: bool = field(default = False) - simulation_name: str = field(default = "ieee13nodeckt") - power_flow_solver_method: str = field(default = "NR") - model_creation_config: ModelCreationConfig = field(default_factory = ModelCreationConfig) + start_time: str = field(default="1655321830") + duration: str = field(default="300") + timestep_frequency: str = field(default="1000") + timestep_increment: str = field(default="1000") + run_realtime: bool = field(default=True) + pause_after_measurements: bool = field(default=False) + simulation_name: str = field(default="ieee13nodeckt") + + +@dataclass +class SimulatorArgs(ConfigBase): + simulator: str = field(default="GridLAB-D") + model_creation_config: ModelCreationConfig = field(default_factory=ModelCreationConfig) + power_flow_solver_method: str = field(default="NR") # __default_simulation_args__ = SimulationArgs() @@ -70,7 +82,7 @@ class Application(ConfigBase): @dataclass class ApplicationConfig(ConfigBase): - applications: List[Application] = field(default_factory=list) + applications: list[Application] = field(default_factory=list) # __default_application_config__ = ApplicationConfig() @@ -78,8 +90,8 @@ class ApplicationConfig(ConfigBase): @dataclass class TestConfig(ConfigBase): - events: List[Dict] = field(default_factory=list) - appId: str = field(default = "") + events: list[dict[str, Any]] = field(default_factory=list) + appId: str = field(default="") # __default_test_config__ = TestConfig() @@ -93,22 +105,23 @@ class ServiceConfig(ConfigBase): @dataclass class PowerSystemConfig(ConfigBase): Line_name: str - GeographicalRegion_name: str = field(default = None) - SubGeographicalRegion_name: str = field(default = None) + GeographicalRegion_name: str = field(default="") + SubGeographicalRegion_name: str = field(default="") + simulator_config: SimulatorArgs = field(default_factory=SimulatorArgs) @dataclass class SimulationConfig(ConfigBase): - power_system_config: PowerSystemConfig - application_configs: List[ApplicationConfig] = field(default_factory=list) + power_system_configs: list[PowerSystemConfig] = field(default_factory=list) + application_configs: list[ApplicationConfig] = field(default_factory=list) simulation_config: SimulationArgs = field(default_factory=SimulationArgs) - service_configs: List[ServiceConfig] = field(default_factory=list) + service_configs: list[ServiceConfig] = field(default_factory=list) application_config: ApplicationConfig = field(default_factory=ApplicationConfig) test_config: TestConfig = 
field(default_factory=TestConfig) class Simulation: - """ Simulation object allows controlling simulations through a python API. + """Simulation object allows controlling simulations through a python API. The simulation object allows controlling and monitoring of simulations through a python API. It is capable of starting, stopping, pausing and restarting simulations @@ -119,7 +132,7 @@ class Simulation: add_onmeasurement_callback, add_oncomplete_callback or add_onstart_callback method respectively. """ - def __init__(self, gapps: GridAPPSD, run_config: Union[Dict, SimulationConfig]): + def __init__(self, gapps: GridAPPSD, run_config: dict[str, Any] | SimulationConfig): assert isinstance(gapps, GridAPPSD), "Must be an instance of GridAPPSD" if isinstance(run_config, SimulationConfig): self._run_config = run_config.asdict() @@ -148,11 +161,11 @@ def __init__(self, gapps: GridAPPSD, run_config: Union[Dict, SimulationConfig]): self._running_or_paused = False # Will be populated when the simulation is first started. - self.simulation_id = None + self.simulation_id: str | None = None - self.__on_start = set() - self.__on_next_timestep_callbacks = set() - self.__on_simulation_complete_callbacks = set() + self.__on_start: set[Callable[..., Any]] = set() + self.__on_next_timestep_callbacks: set[Callable[..., Any]] = set() + self.__on_simulation_complete_callbacks: set[Callable[..., Any]] = set() self._measurement_count = 0 self._log_count = 0 @@ -164,21 +177,20 @@ def __init__(self, gapps: GridAPPSD, run_config: Union[Dict, SimulationConfig]): # float(self._run_config.simulation_config.duration)) # Devices that the user wants measurements from - self._device_measurement_filter = {} + self._device_measurement_filter: dict[str, Any] = {} - self.__filterable_measurement_callback_set = set() + self.__filterable_measurement_callback_set: set[Callable[..., Any]] = set() def start_simulation(self, timeout=30): - """ Start the configured simulation by calling the REQUEST_SIMULATION endpoint. - """ + """Start the configured simulation by calling the REQUEST_SIMULATION endpoint.""" resp = self._gapps.get_response(t.REQUEST_SIMULATION, self._run_config, timeout=timeout) - if 'simulationId' not in resp: + if "simulationId" not in resp: message = "Simulation was not able to run\n" + str(resp) raise SimulationFailedToStartError(message) self._running_or_paused = True - self.simulation_id = resp['simulationId'] + self.simulation_id = resp["simulationId"] # Subscribe to the different components necessary to run and receive # simulated measurements and messages. @@ -190,28 +202,28 @@ def start_simulation(self, timeout=30): p(self) def pause(self): - """ Pause simulation""" + """Pause simulation""" _log.debug("Pausing simulation") command = dict(command="pause") self._gapps.send(t.simulation_input_topic(self.simulation_id), json.dumps(command)) self._running_or_paused = True def stop(self): - """ Stop the simulation""" + """Stop the simulation""" _log.debug("Stopping simulation") command = dict(command="stop") self._gapps.send(t.simulation_input_topic(self.simulation_id), json.dumps(command)) self._running_or_paused = True def resume(self): - """ Resume the simulation""" + """Resume the simulation""" _log.debug("Resuming simulation") command = dict(command="resume") self._gapps.send(t.simulation_input_topic(self.simulation_id), json.dumps(command)) self._running_or_paused = True def run_loop(self): - """ Loop around the running of the simulation itself. + """Loop around the running of the simulation itself. 
Example: @@ -234,7 +246,7 @@ def run_loop(self): time.sleep(0.01) def resume_pause_at(self, pause_in): - """ Resume the simulation and have it automatically pause after specified amount of seconds later. + """Resume the simulation and have it automatically pause after specified amount of seconds later. :param pause_in: number of seconds to run before pausing the simulation """ @@ -244,7 +256,7 @@ def resume_pause_at(self, pause_in): self._running_or_paused = True def add_onmeasurement_callback(self, callback, device_filter=()): - """ registers an onmeasurment callback to be called when measurements have come through. + """registers an onmeasurment callback to be called when measurements have come through. Note: @@ -265,7 +277,7 @@ def onmeasurment(sim, timestep, measurements): self.__filterable_measurement_callback_set.add((callback, device_filter)) def add_onstart_callback(self, callback): - """ registers a start callback that is called when the simulation is started + """registers a start callback that is called when the simulation is started Callback Example: @@ -280,7 +292,7 @@ def onstart(sim): self.__on_start.add(callback) def add_oncomplete_callback(self, callback): - """ registers a completion callback when the last timestep has been requested. + """registers a completion callback when the last timestep has been requested. Callback Example: @@ -293,7 +305,7 @@ def onfinishsimulation(sim): self.__on_simulation_complete_callbacks.add(callback) def add_ontimestep_callback(self, callback): - """ register a timestep callback + """register a timestep callback Callback Example: @@ -307,18 +319,18 @@ def ontimestep(sim, timestep): def __on_platformlog(self, headers, message): try: - if self.simulation_id == message['processId']: + if self.simulation_id == message["processId"]: _log.debug(f"__on_platform_log: message: {message}") except KeyError as e: _log.error(f"__on_platformlog keyerror({e}): {message}") - if 'command' in message: + if "command" in message: _log.debug("Command was: {}".format(message)) def __on_simulation_log(self, headers, message): # Handle the callbacks here - if 'logMessage' in message: - log_message = message['logMessage'] + if "logMessage" in message: + log_message = message["logMessage"] # if this is the last timestamp then call the finished callbacks if log_message == f"Simulation {self.simulation_id} complete": for p in self.__on_simulation_complete_callbacks: @@ -326,29 +338,29 @@ def __on_simulation_log(self, headers, message): self._running_or_paused = False _log.debug("Simulation completed") elif log_message.startswith("incrementing to "): - timestep = log_message[len("incrementing to "):] + timestep = log_message[len("incrementing to ") :] for p in self.__on_next_timestep_callbacks: p(self, int(timestep)) def __onmeasurement(self, headers, message): - """ Call the measurement callbacks + """Call the measurement callbacks :param headers: :param message: :return: """ - sim_id = message['simulation_id'] - timestamp = message['message']['timestamp'] - measurements = message['message']['measurements'] + timestamp = message["message"]["timestamp"] + measurements = message["message"]["measurements"] for p in self.__filterable_measurement_callback_set: p[0](self, timestamp, measurements) -if __name__ == '__main__': +if __name__ == "__main__": from pprint import pprint - psc = PowerSystemConfig(Line_name="_49AD8E07-3BF9-A4E2-CB8F-C3722F837B62") - sim = SimulationConfig(power_system_config=psc) - print(psc.asjson()) + psc = 
[PowerSystemConfig(Line_name="_49AD8E07-3BF9-A4E2-CB8F-C3722F837B62")] + sim = SimulationConfig(power_system_configs=psc) + + # print(psc.asjson()) print(sim.asjson()) pprint(json.loads(sim.asjson()), indent=2) diff --git a/gridappsd-python-lib/gridappsd/timeseries.py b/gridappsd-python-lib/gridappsd/timeseries.py index d2ce8b1..f58e2cd 100644 --- a/gridappsd-python-lib/gridappsd/timeseries.py +++ b/gridappsd-python-lib/gridappsd/timeseries.py @@ -1,11 +1,10 @@ -#import json +# import json from gridappsd import topics, json_extension as json from typing import Optional class Query: - """ Class to create and execute request to query timeseries data - """ + """Class to create and execute request to query timeseries data""" def __init__(self, measurement): self.queryMeasurement = measurement @@ -14,7 +13,7 @@ def __init__(self, measurement): self.key = None def select(self, *keys): - """ Defines what fields should be returned from the query. + """Defines what fields should be returned from the query. If this function is not called or is called without argument then all the fields are returned. @@ -26,7 +25,7 @@ def select(self, *keys): return self def first(self, n=Optional[int]): - """ Method to add request to return first or oldest data to the query. + """Method to add request to return first or oldest data to the query. When n is specified, query will return first or oldest 'n' rows. @@ -37,7 +36,7 @@ def first(self, n=Optional[int]): return self def last(self, n=Optional[int]): - """ Method to add request to return last or latest data to the query. + """Method to add request to return last or latest data to the query. When n is specified, query will return last or latest 'n' rows. @@ -48,7 +47,7 @@ def last(self, n=Optional[int]): return self def ge(self, value): - """ Method to add 'value greater than or equal to' filter to a key. + """Method to add 'value greater than or equal to' filter to a key. :param value: """ @@ -59,7 +58,7 @@ def ge(self, value): return self def le(self, value): - """ Method to add 'value less than or equal to' filter to a key. + """Method to add 'value less than or equal to' filter to a key. :param value: """ @@ -70,7 +69,7 @@ def le(self, value): return self def eq(self, value): - """ Method to add 'value equal to' filter to a key. + """Method to add 'value equal to' filter to a key. :param value: """ @@ -81,7 +80,7 @@ def eq(self, value): return self def between(self, value1, value2): - """ Method to add 'value between' value1 and value2 filter to a key. + """Method to add 'value between' value1 and value2 filter to a key. 
:param value1: defines 'greater than equal to' filter for key's value :param value2: defines 'less than equal to' filter for key's value @@ -100,6 +99,5 @@ def where_key(self, key): def execute(self, gridappsd_obj): del self.key - response = gridappsd_obj.get_response(topics.REQUEST_TIMESERIES_DATA, - json.dumps(self.__dict__)) + response = gridappsd_obj.get_response(topics.REQUEST_TIMESERIES_DATA, json.dumps(self.__dict__)) return response diff --git a/gridappsd-python-lib/gridappsd/topics.py b/gridappsd-python-lib/gridappsd/topics.py index 89a345e..e61c610 100644 --- a/gridappsd-python-lib/gridappsd/topics.py +++ b/gridappsd-python-lib/gridappsd/topics.py @@ -37,17 +37,17 @@ # PACIFIC NORTHWEST NATIONAL LABORATORY operated by BATTELLE for the # UNITED STATES DEPARTMENT OF ENERGY under Contract DE-AC05-76RL01830 # ------------------------------------------------------------------------------- -DEFAULT_FNCS_LOCATION = 'tcp://localhost:5570' +DEFAULT_FNCS_LOCATION = "tcp://localhost:5570" -BASE_TOPIC = '/topic/goss.gridappsd' -FNCS_BASE_INPUT_TOPIC = '/topic/goss.gridappsd.simulation.input' -FNCS_BASE_OUTPUT_TOPIC = '/topic/goss.gridappsd.simulation.output' -BASE_SIMULATION_TOPIC = '/topic/goss.gridappsd.simulation' +BASE_TOPIC = "/topic/goss.gridappsd" +FNCS_BASE_INPUT_TOPIC = "/topic/goss.gridappsd.simulation.input" +FNCS_BASE_OUTPUT_TOPIC = "/topic/goss.gridappsd.simulation.output" +BASE_SIMULATION_TOPIC = "/topic/goss.gridappsd.simulation" BASE_SIMULATION_LOG_TOPIC = "/topic/goss.gridappsd.simulation.log" -BASE_FIELD_TOPIC = '/topic/goss.gridappsd.field' +BASE_FIELD_TOPIC = "/topic/goss.gridappsd.field" -BASE_FIELD_QUEUE = 'goss.gridappsd.field' -REGISTER_AGENT_QUEUE = 'goss.gridappsd.field.register.agent' +BASE_FIELD_QUEUE = "goss.gridappsd.field" +REGISTER_AGENT_QUEUE = "goss.gridappsd.field.register.agent" BLAZEGRAPH = "/queue/goss.gridappsd.process.request.data.powergridmodel" # https://gridappsd.readthedocs.io/en/latest/using_gridappsd/index.html#querying-logs @@ -75,13 +75,12 @@ def platform_log_topic(): - """ Utility method for getting the platform.log base topic - """ + """Utility method for getting the platform.log base topic""" return "/topic/{}.{}".format(BASE_TOPIC_PREFIX, "platform.log") def service_input_topic(service_id: str, simulation_id: int | str | None = None): - """ Utility method for getting the input topic for a specific service. + """Utility method for getting the input topic for a specific service. The service id should be the registered service with the platform. One can get the list of registered services by using the `GridAPPSD.get_platform_status()` @@ -103,7 +102,7 @@ def service_input_topic(service_id: str, simulation_id: int | str | None = None) def service_output_topic(service_id: str, simulation_id: int | str | None = None): - """ Utility method for getting the output topic for a specific service. + """Utility method for getting the output topic for a specific service. The service id should be the registered service with the platform. One can get the list of registered services by using the `GridAPPSD.get_platform_status()` @@ -125,7 +124,7 @@ def service_output_topic(service_id: str, simulation_id: int | str | None = None def application_input_topic(application_id: str, simulation_id: int | str | None = None): - """ Utility method for getting the input topic for a specific application. + """Utility method for getting the input topic for a specific application. The application_id should be the registered service with the platform. 
One can get the list of registered application by using the `GridAPPSD.get_platform_status()` @@ -144,7 +143,7 @@ def application_input_topic(application_id: str, simulation_id: int | str | None def application_output_topic(application_id: str, simulation_id: int | str | None = None): - """ Utility method for getting the output topic for a specific application. + """Utility method for getting the output topic for a specific application. The application_id should be the registered service with the platform. One can get the list of registered application by using the `GridAPPSD.get_platform_status()` @@ -163,31 +162,30 @@ def application_output_topic(application_id: str, simulation_id: int | str | Non def simulation_output_topic(simulation_id): - """ Gets the topic for subscribing to output from the simulation. + """Gets the topic for subscribing to output from the simulation. :param simulation_id: :return: str: Topic to subscribe to data from teh simulation. """ - return "{}.{}.{}".format(BASE_SIMULATION_TOPIC, 'output', simulation_id) + return "{}.{}.{}".format(BASE_SIMULATION_TOPIC, "output", simulation_id) def simulation_input_topic(simulation_id): - """ Gets the topic to write data to for the simulation + """Gets the topic to write data to for the simulation :param simulation_id: :return: str: Topic to write data for the simulation. """ - return "{}.{}.{}".format(BASE_SIMULATION_TOPIC, 'input', simulation_id) + return "{}.{}.{}".format(BASE_SIMULATION_TOPIC, "input", simulation_id) def simulation_log_topic(simulation_id): - """https://gridappsd.readthedocs.io/en/latest/using_gridappsd/index.html#subscribing-to-logs - """ + """https://gridappsd.readthedocs.io/en/latest/using_gridappsd/index.html#subscribing-to-logs""" return "{}.{}".format(BASE_SIMULATION_LOG_TOPIC, simulation_id) def field_message_bus_topic(message_bus_id: str, app_id: str = None, agent_id: str = None): - """ Utility method for getting the publish/subscribe topic for a specific message bus. + """Utility method for getting the publish/subscribe topic for a specific message bus. :param message_bus_id: :param app_id: @@ -200,7 +198,7 @@ def field_message_bus_topic(message_bus_id: str, app_id: str = None, agent_id: s def field_message_bus_app_topic(message_bus_id, app_id=None): - """ Utility method for getting the publish/subscribe topic for a specific message bus. + """Utility method for getting the publish/subscribe topic for a specific message bus. :param message_bus_id: :param app_id: @@ -211,7 +209,7 @@ def field_message_bus_app_topic(message_bus_id, app_id=None): def field_message_bus_agent_topic(message_bus_id, agent_id=None): - """ Utility method for getting the publish/subscribe topic for a specific message bus. + """Utility method for getting the publish/subscribe topic for a specific message bus. 
:param message_bus_id: :param agent_id: @@ -222,7 +220,7 @@ def field_message_bus_agent_topic(message_bus_id, agent_id=None): def field_agent_request_queue(message_bus_id, agent_id): - """ Utility method for getting the request topic for a specific distributed agent + """Utility method for getting the request topic for a specific distributed agent :param message_bus_id: :param agent_id: @@ -233,19 +231,18 @@ def field_agent_request_queue(message_bus_id, agent_id): def context_request_queue(message_bus_id): - """ Utility method for getting the request topic for context manager + """Utility method for getting the request topic for context manager :param message_bus_id: :return: """ assert message_bus_id, "message_bus_id cannot be empty" - return "{}.request.{}.{}".format(BASE_FIELD_QUEUE, message_bus_id, - message_bus_id + '.context_manager') + return "{}.request.{}.{}".format(BASE_FIELD_QUEUE, message_bus_id, message_bus_id + ".context_manager") def field_output_topic(message_bus_id=None, simulation_id=None): - """ Utility method for getting the field output topic. + """Utility method for getting the field output topic. If message_bus_id is None, it returns topic used by centralized device interfaces to publish measurements. If message_bus_id is not None, it returns topic used by distributed devices interfaces to publish measurements which is then subscribed by distributed agents. @@ -257,12 +254,11 @@ def field_output_topic(message_bus_id=None, simulation_id=None): if simulation_id is None: return "{}.{}".format(BASE_FIELD_TOPIC, "output") else: - return "{}.{}.{}.{}".format(BASE_FIELD_TOPIC, "simulation.output", simulation_id, - message_bus_id) + return "{}.{}.{}.{}".format(BASE_FIELD_TOPIC, "simulation.output", simulation_id, message_bus_id) def field_input_topic(message_bus_id=None, simulation_id=None): - """ Utility method for getting the field input topic. + """Utility method for getting the field input topic. If message_bus_id is None, it returns topic used by centralized device interfaces to subscribe to control commands. If message_bus_id is not None, it returns topic used by distributed devices interfaces to subscribe to control commands. @@ -274,5 +270,4 @@ def field_input_topic(message_bus_id=None, simulation_id=None): if simulation_id is None: return "{}.{}".format(BASE_FIELD_TOPIC, "input") else: - return "{}.{}.{}.{}".format(BASE_FIELD_TOPIC, "simulation.input", simulation_id, - message_bus_id) + return "{}.{}.{}.{}".format(BASE_FIELD_TOPIC, "simulation.input", simulation_id, message_bus_id) diff --git a/gridappsd-python-lib/gridappsd/utils.py b/gridappsd-python-lib/gridappsd/utils.py index ab718bf..701d8c7 100644 --- a/gridappsd-python-lib/gridappsd/utils.py +++ b/gridappsd-python-lib/gridappsd/utils.py @@ -1,10 +1,12 @@ -import datetime, time +import datetime +import time from enum import Enum from typing import Optional from dateutil import parser import os -try: # python2.7 + +try: # python2.7 from urlparse import urlparse except ImportError: from urllib.parse import urlparse @@ -71,7 +73,7 @@ def get_gridappsd_address(): def get_gridappsd_application_id(): - """ Retrieve the application_id from the environment. + """Retrieve the application_id from the environment. In order to use this function an environmental variable `GRIDAPPSD_APPLICATION_ID` must have been set. For docker containers this is done in the @@ -85,7 +87,7 @@ def get_gridappsd_application_id(): def get_gridappsd_simulation_id() -> Optional[str]: - """ Retrieve simulation_id from environment. 
+ """Retrieve simulation_id from environment. This method will return a `None` if the GRIDAPPSD_SIMULATION_ID environmental variable is not set. diff --git a/gridappsd-python-lib/gridappsd_python_testing.ipynb b/gridappsd-python-lib/gridappsd_python_testing.ipynb new file mode 100644 index 0000000..41fd1e7 --- /dev/null +++ b/gridappsd-python-lib/gridappsd_python_testing.ipynb @@ -0,0 +1,102 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "fa474e10", + "metadata": {}, + "outputs": [], + "source": [ + "from gridappsd import GridAPPSD\n", + "import gridappsd.topics as topics" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "341dcd4e", + "metadata": {}, + "outputs": [], + "source": [ + "addr = [\"localhost\", 61613]\n", + "user = \"system\"\n", + "password = \"manager\"" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "54a6492b", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "g = GridAPPSD(address=addr, username=user, password=password)\n", + "g.connect()\n", + "g.connected\n" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "59297d02", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "goss.gridappsd.process.request.data.powergridmodel\n" + ] + } + ], + "source": [ + "message = {\n", + " \"requestType\": \"QUERY_MODEL_NAMES\",\n", + " \"resultFormat\": \"JSON\"\n", + "}\n", + "print(topics.REQUEST_POWERGRID_DATA)\n", + "#g.get_response(topics.REQUEST_POWERGRID_DATA, message, timeout=120)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b8584a1", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "gridappsd-python-py3.13", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.13.5" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/gridappsd-python-lib/pyproject.toml b/gridappsd-python-lib/pyproject.toml index a627dd9..580abe4 100644 --- a/gridappsd-python-lib/pyproject.toml +++ b/gridappsd-python-lib/pyproject.toml @@ -1,71 +1,37 @@ -[tool.poetry] +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] name = "gridappsd-python" -version = "2025.6.0" +version = "2026.1.1b2" description = "A GridAPPS-D Python Adapter" +readme = "README.md" +license = "BSD-3-Clause" +requires-python = ">=3.10,<4.0" authors = [ - "C. Allwardt <3979063+craig8@users.noreply.github.com>", - "P. 
Sharma =1.2.0"] -build-backend = "poetry.core.masonry.api" - -[tool.yapfignore] -ignore_patterns = [ - ".venv/**", - ".pytest_cache/**", - "dist/**", - "docs/**" +dependencies = [ + "PyYAML>=6.0", + "pytz>=2022.7", + "dateutils>=0.6.7", + "stomp-py==6.0.0", + "requests>=2.28", + "python-dotenv>=0.9", + "loguru>=0.7", ] -[tool.yapf] -based_on_style = "pep8" -spaces_before_comment = 4 -column_limit = 99 -split_before_logical_operator = true +[project.urls] +Repository = "https://github.com/GRIDAPPSD/gridappsd-python" +Homepage = "https://gridappsd.readthedocs.io" +[project.scripts] +register_app = "gridappsd.register_app:main" +gridappsd-cli = "gridappsd.cli:_main" -[tool.poetry.requires-plugins] -poetry-plugin-export = ">=1.8" +[tool.hatch.build.targets.wheel] +packages = ["gridappsd"] diff --git a/gridappsd-python-lib/tests/test_simulation.py b/gridappsd-python-lib/tests/test_simulation.py index b6c9365..47dcceb 100644 --- a/gridappsd-python-lib/tests/test_simulation.py +++ b/gridappsd-python-lib/tests/test_simulation.py @@ -1,43 +1,65 @@ -# import json -# # from pprint import pprint -# import logging -# import os -# import sys -# import time -# import pytest +import json +import logging +import os +import sys +import time +import pytest +from datetime import datetime, timezone -# logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) +logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) -# from gridappsd import GridAPPSD, topics as t -# from gridappsd.simulation import Simulation +from gridappsd import GridAPPSD, topics as t +from gridappsd.simulation import Simulation, PowerSystemConfig, SimulationArgs, SimulationConfig -# # The directory containing this file -# HERE = os.path.dirname(__file__) +simulation_is_complete = False +measurements_received = 0 -# def base_config(): -# data = {"power_system_config":{"SubGeographicalRegion_name":"_ABEB635F-729D-24BF-B8A4-E2EF268D8B9E","GeographicalRegion_name":"_73C512BD-7249-4F50-50DA-D93849B89C43","Line_name":"_49AD8E07-3BF9-A4E2-CB8F-C3722F837B62"},"simulation_config":{"power_flow_solver_method":"NR","duration":120,"simulation_name":"ieee13nodeckt","simulator":"GridLAB-D","start_time":1605418946,"run_realtime":False,"simulation_output":{},"model_creation_config":{"load_scaling_factor":1.0,"triplex":"y","encoding":"u","system_frequency":60,"voltage_multiplier":1.0,"power_unit_conversion":1.0,"unique_names":"y","schedule_name":"ieeezipload","z_fraction":0.0,"i_fraction":1.0,"p_fraction":0.0,"randomize_zipload_fractions":False,"use_houses":False},"simulation_broker_port":51044,"simulation_broker_location":"127.0.0.1"},"application_config":{"applications":[]},"service_configs":[],"test_config":{"randomNum":{"seed":{"value":185213303967438},"nextNextGaussian":0.0,"haveNextNextGaussian":False},"events":[],"testInput":True,"testOutput":True,"appId":"","testId":"1468836560","testType":"simulation_vs_expected","storeMatches":False},"simulation_request_type":"NEW"} -# # with open("{HERE}/simulation_fixtures/13_node_2_min_base.json".format(HERE=HERE)) as fp: -# # data = json.load(fp) -# return data +@pytest.fixture +def createGadObject(): + gad_user = os.environ.get('GRIDAPPSD_USER') + if gad_user is None: + os.environ['GRIDAPPSD_USER'] = 'system' + gad_password = os.environ.get('GRIDAPPSD_PASSWORD') + if gad_password is None: + os.environ['GRIDAPPSD_PASSWORD'] = 'manager' + return GridAPPSD() -# def test_simulation_no_duplicate_measurement_timestamps(gridappsd_client: GridAPPSD): -# num_measurements = 0 -# timestamps = set() - -# def 
measurement(sim, timestamp, measurement): -# nonlocal num_measurements -# num_measurements += 1 -# assert timestamp not in timestamps -# timestamps.add(timestamp) - -# gapps = gridappsd_client -# sim = Simulation(gapps, base_config()) -# sim.add_onmeasurement_callback(measurement) -# sim.start_simulation() -# sim.run_loop() - -# # did we get a measurement? -# assert num_measurements > 0 - -# # if empty then we know the simulation did not work. -# assert timestamps +@pytest.mark.integration +def test_createSimulations(createGadObject): + gadObj = createGadObject + response = gadObj.query_model_info() + models = response.get("data", {}).get("models", {}) + start_time = int(datetime(year=2025, month=1, day=1, hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc).timestamp()) + simulationArgs = SimulationArgs(start_time=f"{start_time}", + duration="120", + run_realtime=False, + pause_after_measurements=False) + sim_config = SimulationConfig(simulation_config=simulationArgs) + modelsToRun = [ + "49AD8E07-3BF9-A4E2-CB8F-C3722F837B62", # IEEE 13 Node Test Feeder + "C1C3E687-6FFD-C753-582B-632A27E28507" # IEEE 123 Node Test Feeder + ] + for m in models: + if m.get("modelId") not in modelsToRun: + continue + line_name = m.get("modelId") + subregion_name = m.get("subRegionId") + region_name = m.get("regionId") + psc = PowerSystemConfig(Line_name=line_name, + SubGeographicalRegion_name=subregion_name, + GeographicalRegion_name=region_name) + sim_config.power_system_configs.append(psc) + sim_obj = Simulation(gapps=gadObj, run_config=sim_config) + def on_measurement(sim, ts, m): + global measurements_received + measurements_received += 1 + def on_simulation_complete(sim): + global simulation_is_complete + simulation_is_complete = True + sim_obj.add_onmeasurement_callback(on_measurement) + sim_obj.add_oncomplete_callback(on_simulation_complete) + sim_obj.start_simulation() + while not simulation_is_complete: + time.sleep(1) + assert measurements_received == 1 + gadObj.disconnect() diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..ac6e443 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,17 @@ +[mypy] +# Target Python 3.10+ which supports lowercase generics (dict, list, etc.) +python_version = 3.10 + +# Import handling +ignore_missing_imports = True + +# Warning settings +warn_return_any = True +warn_unused_ignores = True + +# Allow untyped definitions in legacy code +disallow_untyped_defs = False +disallow_incomplete_defs = False + +# Don't require Optional for arguments with None defaults +implicit_optional = True diff --git a/pixi.toml b/pixi.toml new file mode 100644 index 0000000..d7d6176 --- /dev/null +++ b/pixi.toml @@ -0,0 +1,166 @@ +[workspace] +name = "gridappsd-python-workspace" +version = "2026.1.1b2" +description = "A GridAPPS-D Python Adapter" +authors = [ + "C. Allwardt <3979063+craig8@users.noreply.github.com>", + "P. Sharma ", + "A. 
Fisher ", +] +license = "BSD-3-Clause" +readme = "README.md" +channels = ["conda-forge"] +platforms = ["linux-64", "osx-arm64", "osx-64", "win-64"] + +[dependencies] +python = ">=3.10,<4.0" + +[pypi-dependencies] +# Main library dependencies +pyyaml = ">=6.0" +pytz = ">=2022.7" +dateutils = ">=0.6.7" +stomp-py = ">=6.0.0" +requests = ">=2.28" +python-dotenv = ">=0.9" +loguru = ">=0.7" + +# Field bus dependencies +cim-graph = ">=0.4.3a6" +click = ">=8.1" + +# Install local packages in editable mode +gridappsd-python = { path = "gridappsd-python-lib", editable = true } +gridappsd-field-bus = { path = "gridappsd-field-bus-lib", editable = true } + +[feature.dev.pypi-dependencies] +pytest = ">=8.0" +pytest-cov = ">=4.0" +pytest-html = ">=3.1" +mypy = ">=1.3" +types-python-dateutil = ">=2.8" +types-pyyaml = ">=6.0" +ruff = ">=0.1" +pre-commit = ">=3.0" +python-on-whales = ">=0.60" +gitpython = ">=3.1" +sphinx = ">=7.0" +build = ">=1.0" +twine = ">=4.0" + +# ══════════════════════════════════════════════════════════════════════════════ +# Python Version Features (for CI matrix testing) +# ══════════════════════════════════════════════════════════════════════════════ + +[feature.py310.dependencies] +python = "~=3.10.0" + +[feature.py311.dependencies] +python = "~=3.11.0" + +[feature.py312.dependencies] +python = "~=3.12.0" + +[feature.py313.dependencies] +python = "~=3.13.0" + +[feature.py314.dependencies] +python = "~=3.14.0" + +[environments] +default = { features = ["dev"], solve-group = "default" } +py310 = { features = ["dev", "py310"], solve-group = "py310" } +py311 = { features = ["dev", "py311"], solve-group = "py311" } +py312 = { features = ["dev", "py312"], solve-group = "py312" } +py313 = { features = ["dev", "py313"], solve-group = "py313" } +py314 = { features = ["dev", "py314"], solve-group = "py314" } + +# ══════════════════════════════════════════════════════════════════════════════ +# Tasks +# ══════════════════════════════════════════════════════════════════════════════ + +[tasks] + +# ────────────────────────────────────────────────────────────────────────────── +# Testing +# ────────────────────────────────────────────────────────────────────────────── +test = { cmd = "pytest gridappsd-python-lib/tests -v -m 'not integration'", description = "Run main library tests" } +test-cov = { cmd = "pytest gridappsd-python-lib/tests -v -m 'not integration' --cov=gridappsd --cov-report=term-missing --cov-report=html", description = "Run tests with coverage" } +test-field-bus = { cmd = "pytest gridappsd-field-bus-lib/tests -v", description = "Run field bus tests" } +test-all = { depends-on = ["test", "test-field-bus"], description = "Run all tests" } +test-integration = { cmd = "pytest gridappsd-python-lib/tests -v -m integration", description = "Run integration tests only" } + +# ────────────────────────────────────────────────────────────────────────────── +# Code Quality +# ────────────────────────────────────────────────────────────────────────────── +lint = { cmd = "ruff check gridappsd-python-lib/gridappsd gridappsd-field-bus-lib/gridappsd_field_bus", description = "Run linter" } +lint-fix = { cmd = "ruff check --fix gridappsd-python-lib/gridappsd gridappsd-field-bus-lib/gridappsd_field_bus", description = "Run linter with auto-fix" } +format = { cmd = "ruff format gridappsd-python-lib/gridappsd gridappsd-field-bus-lib/gridappsd_field_bus", description = "Format code" } +format-check = { cmd = "ruff format --check gridappsd-python-lib/gridappsd 
gridappsd-field-bus-lib/gridappsd_field_bus", description = "Check code formatting" } +typecheck = { cmd = "mypy gridappsd-python-lib/gridappsd gridappsd-field-bus-lib/gridappsd_field_bus --ignore-missing-imports", description = "Run type checker" } +check = { depends-on = ["lint", "format-check", "typecheck"], description = "Run all code quality checks" } + +# ────────────────────────────────────────────────────────────────────────────── +# Building +# ────────────────────────────────────────────────────────────────────────────── +build-lib = { cmd = "python -m build", cwd = "gridappsd-python-lib", description = "Build main library" } +build-field-bus = { cmd = "python -m build", cwd = "gridappsd-field-bus-lib", description = "Build field bus library" } +build-all = { depends-on = ["build-lib", "build-field-bus"], description = "Build all packages" } +collect-dist = { cmd = "mkdir -p dist && cp gridappsd-python-lib/dist/*.whl dist/ 2>/dev/null || true && cp gridappsd-python-lib/dist/*.tar.gz dist/ 2>/dev/null || true && cp gridappsd-field-bus-lib/dist/*.whl dist/ 2>/dev/null || true && cp gridappsd-field-bus-lib/dist/*.tar.gz dist/ 2>/dev/null || true", description = "Collect dist files to root" } +build = { depends-on = ["build-all", "collect-dist"], description = "Build all and collect artifacts" } + +# ────────────────────────────────────────────────────────────────────────────── +# Documentation +# ────────────────────────────────────────────────────────────────────────────── +docs = { cmd = "make html", cwd = "docs", description = "Build documentation" } +docs-serve = { cmd = "python -m http.server -d build/html 8000", cwd = "docs", description = "Serve documentation locally" } +docs-clean = { cmd = "make clean", cwd = "docs", description = "Clean documentation build" } + +# ────────────────────────────────────────────────────────────────────────────── +# Docker / Integration Environment +# ────────────────────────────────────────────────────────────────────────────── +docker-clone = { cmd = "git clone --depth 1 https://github.com/GRIDAPPSD/gridappsd-docker /tmp/gridappsd-docker 2>/dev/null || (cd /tmp/gridappsd-docker && git pull)", description = "Clone/update gridappsd-docker" } +docker-up = { cmd = "docker compose up -d", cwd = "/tmp/gridappsd-docker", depends-on = ["docker-clone"], description = "Start GridAPPS-D containers" } +docker-down = { cmd = "docker compose down", cwd = "/tmp/gridappsd-docker", description = "Stop GridAPPS-D containers" } +docker-logs = { cmd = "docker compose logs -f gridappsd", cwd = "/tmp/gridappsd-docker", description = "Follow GridAPPS-D logs" } + +# ────────────────────────────────────────────────────────────────────────────── +# Pre-commit +# ────────────────────────────────────────────────────────────────────────────── +pre-commit-install = { cmd = "pre-commit install", description = "Install pre-commit hooks" } +pre-commit-run = { cmd = "pre-commit run --all-files", description = "Run pre-commit on all files" } + +# ────────────────────────────────────────────────────────────────────────────── +# Cleaning +# ────────────────────────────────────────────────────────────────────────────── +clean = { cmd = "rm -rf dist/ build/ gridappsd-python-lib/dist gridappsd-python-lib/build gridappsd-field-bus-lib/dist gridappsd-field-bus-lib/build .pytest_cache .mypy_cache .ruff_cache htmlcov && find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true && find . 
-type d -name '*.egg-info' -exec rm -rf {} + 2>/dev/null || true", description = "Clean all build artifacts" } + +# ────────────────────────────────────────────────────────────────────────────── +# Publishing +# ────────────────────────────────────────────────────────────────────────────── +publish-test = { cmd = "twine upload --repository testpypi dist/*", description = "Publish to TestPyPI" } +publish = { cmd = "twine upload dist/*", description = "Publish to PyPI" } + +# ────────────────────────────────────────────────────────────────────────────── +# Convenience Workflows +# ────────────────────────────────────────────────────────────────────────────── +ci = { depends-on = ["check", "test-all"], description = "Run full CI pipeline" } +release = { depends-on = ["clean", "check", "test-all", "build"], description = "Full release workflow" } + +# ══════════════════════════════════════════════════════════════════════════════ +# Tool Configuration +# ══════════════════════════════════════════════════════════════════════════════ +# Note: Ruff configuration is in ruff.toml (not read from pixi.toml) + +[tool.mypy] +python_version = "3.10" +warn_return_any = true +warn_unused_ignores = true +ignore_missing_imports = true + +[tool.pytest.ini_options] +testpaths = ["gridappsd-python-lib/tests", "gridappsd-field-bus-lib/tests"] +addopts = "-v" +markers = [ + "integration: marks tests as integration tests (require docker)", +] diff --git a/poetry.toml b/poetry.toml deleted file mode 100644 index ab1033b..0000000 --- a/poetry.toml +++ /dev/null @@ -1,2 +0,0 @@ -[virtualenvs] -in-project = true diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 42fcc17..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,29 +0,0 @@ -[tool.poetry] -name = "gridappsd-python-workspace" -version = "2025.6.0" -description = "A GridAPPS-D Python Adapter" -authors = [ - "C. Allwardt <3979063+craig8@users.noreply.github.com>", - "P. 
Sharma =2.0.0"] -build-backend = "poetry.core.masonry.api" - -[tool.poetry.requires-plugins] -poetry-plugin-export = ">=1.8" diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 0000000..0a24f2c --- /dev/null +++ b/ruff.toml @@ -0,0 +1,41 @@ +# Ruff configuration for gridappsd-python +# https://docs.astral.sh/ruff/ + +line-length = 120 + +[lint] +# Rules to enable +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "W", # pycodestyle warnings +] + +# Rules to ignore for legacy code compatibility +ignore = [ + "E101", # Mixed spaces and tabs (some files have this in multiline strings) + "E402", # Module level import not at top of file + "E501", # Line too long (legacy code has long strings/docstrings) + "E722", # Do not use bare `except` + "F401", # Imported but unused (needed for re-exports in __init__.py) + "F811", # Redefinition of unused name + "F821", # Undefined name (some legacy code patterns) +] + +# Allow fix for all enabled rules (when `--fix`) is provided +fixable = ["ALL"] +unfixable = [] + +[lint.per-file-ignores] +# Allow unused imports in __init__.py files (re-exports) +"__init__.py" = ["F401"] + +[format] +# Use double quotes for strings +quote-style = "double" + +# Indent with spaces +indent-style = "space" + +# Unix-style line endings +line-ending = "auto" diff --git a/scripts/create_local_version.sh b/scripts/create_local_version.sh deleted file mode 100755 index 6e37c22..0000000 --- a/scripts/create_local_version.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/sh -# Usus dunamai to determine a semver compatible version for the current state of the project -# Useefull when building wheels in CI/CD on branches or merge requests, -# without possibly overwriting released versions (of certain tag) -# Used to run in CI/CD, as it will modify both pyproject.toml's and python files (by setting the right string in `__version__=..`) -set -x -set -u -set -e -DIR="$( cd "$( dirname "$0" )" && pwd )" -cd "${DIR}/.." || exit - -# first run directly, to have script stop if dunamai isn't available (for example if not installed, or running in wrong virtual env) -dunamai from any -VERSION=$(dunamai from any) -echo $VERSION - -# all python packages, in topological order -. ${DIR}/projects.sh -_projects=". ${PROJECTS}" -echo "Running on following projects: ${_projects}" -if [ "$(uname)" = "Darwin" ]; then export SEP=" "; else SEP=""; fi -for p in $_projects -do - echo "Creating local version of ${p}" - echo "$VERSION" > "${p}/VERSION" - sed -i$SEP'' "s/^version = .*/version = \"$VERSION\"/" "$p/pyproject.toml" -done -sed -i$SEP'' "s/^__version__.*/__version__ = \"$VERSION\"/" package-a/package_a/__init__.py -sed -i$SEP'' "s/^__version__.*/__version__ = \"$VERSION\"/" package-b/package_b/__init__.py -sed -i$SEP'' "s/^__version__.*/__version__ = \"$VERSION\"/" service-c/service_c/__init__.py diff --git a/scripts/poetry_build.sh b/scripts/poetry_build.sh deleted file mode 100755 index 3a909f5..0000000 --- a/scripts/poetry_build.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/bin/sh -# This script builds all the poetry packages, creating wheels, dists, and requirements.txt's -# All the wheels will be placed in both the root folder's dist, and in a dist folder within each package -set -x -set -u -set -e -DIR="$( cd "$( dirname "$0" )" && pwd )" -cd "${DIR}/.." || exit - -poetry version -VERSION=$(poetry version | awk '{print $2}') - -if [ "$(uname)" = "Darwin" ]; then export SEP=" "; else SEP=""; fi - -# all python packages, in topological order -. 
${DIR}/projects.sh -_projects=$PROJECTS -echo "Running on following projects: ${_projects}" -for p in $_projects -do - cd "${DIR}/../${p}" || exit - # change path deps in project def - sed -i$SEP'' "s|{.*path.*|\"^$VERSION\"|" pyproject.toml - # include project changelog - cp ../CHANGELOG.md ./ - poetry build - # export deps, with updated path deps - mkdir -p info - poetry export -f requirements.txt --output ./info/requirements.txt --without-hashes --with-credentials - sed -i$SEP'' "s/ @ .*;/==$VERSION;/" "./info/requirements.txt" - ls -altr ./dist/ -done - -# -u for update -if [ "$(uname)" = "Darwin" ]; then export FLAG=" "; else FLAG="-u "; fi -echo "==========" -mkdir -p "${DIR}/../info" -cp $FLAG "${DIR}/../CHANGELOG.md" "${DIR}/../info/" -cp $FLAG "${DIR}/../VERSION" "${DIR}/../info/" -echo "==========" -# copying each wheel to root folder dist -mkdir -p "${DIR}/../dist" -for p in $_projects -do - ls -altr "${DIR}/../${p}/dist/" - cp $FLAG "${DIR}/../${p}/dist/"*".whl" "${DIR}/../dist/" - cp $FLAG "${DIR}/../${p}/dist/"*".tar.gz" "${DIR}/../dist/" -done -echo "==========" -ls -altr "${DIR}/../dist/" -# then copying these to each project -# for p in $_projects -# do -# cp $FLAG "${DIR}/../dist/"*".whl" "${DIR}/../${p}/dist/" -# cp $FLAG "${DIR}/../info/"*"" "${DIR}/../${p}/info/" -# ls -altr "${DIR}/../${p}/dist/" -# done diff --git a/scripts/poetry_install.sh b/scripts/poetry_install.sh deleted file mode 100755 index c139b4c..0000000 --- a/scripts/poetry_install.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -# This script reflects the latest changes of pyproject.toml -# into both the poetry.lock file and the virtualenv. -# by running `poetry lock && poetry sync` -# It first configures poetry to use the right python for creation of the virtual env -set -x -set -u -set -e -DIR="$( cd "$( dirname "$0" )" && pwd )" -cd "${DIR}/.." || exit - -# all python packages, in topological order -. ${DIR}/projects.sh -_projects=". ${PROJECTS}" -echo "Running on following projects: ${_projects}" -for p in $_projects -do - cd "${DIR}/../${p}" || exit - poetry env use $(which python3) || poetry env use 3.8 - poetry lock && poetry sync -done diff --git a/scripts/poetry_update.sh b/scripts/poetry_update.sh deleted file mode 100755 index 5003d41..0000000 --- a/scripts/poetry_update.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -# This script reflects the latest changes of pyproject.toml -# into both the poetry.lock file and the virtualenv. -# by running `poetry update && poetry install sync` -# It first configures poetry to use the right python for creation of the virtual env -set -x -set -u -set -e -DIR="$( cd "$( dirname "$0" )" && pwd )" -cd "${DIR}/.." || exit - -# all python packages, in topological order -. ${DIR}/projects.sh -_projects=". 
${PROJECTS}" -echo "Running on following projects: ${_projects}" -for p in $_projects -do - cd "${DIR}/../${p}" || exit - poetry env use $(which python3) || poetry env use 3.8 - poetry update && poetry install sync -done diff --git a/scripts/projects.sh b/scripts/projects.sh deleted file mode 100644 index 94c9503..0000000 --- a/scripts/projects.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh - -# all python packages, in topological order -PROJECTS='gridappsd-field-bus-lib gridappsd-python-lib' diff --git a/scripts/replace_path_deps.sh b/scripts/replace_path_deps.sh deleted file mode 100755 index 48663d0..0000000 --- a/scripts/replace_path_deps.sh +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/sh -# This file shows how the sdist & wheel files can be manually modified afterwards -# to replace path dependencies with a version range. -# If the mono repo is at version 1.2.3, it will set the dependencies to (~=1.2,>=1.2.3), effectively equal to ~1.2.3. -set -x -set -u -set -e - -VERSION=$(poetry version | awk '{print $2}') -VERSION_MINOR=$(echo $VERSION | sed -E "s/^([0-9]*\.[0-9]*).*/\1/") -curdir=$(pwd) -if [ "$(uname)" = "Darwin" ]; then export SEP=" "; else SEP=""; fi - -# ===== Updating the TAR.GZ file ===== -cd "$curdir" -TARFILES=$(ls dist/*.tar.gz) -for TARFILE in $TARFILES -do - rm -rf /tmp/version_update - mkdir -p /tmp/version_update - tar -C /tmp/version_update -xf $curdir/$TARFILE - cd /tmp/version_update - # Replace the path dependencies (which are prefixed with '@') - # with compatible version to the current monorepo, but at least at the current one. - # In semver notation: ~1.2.3, which equals >=1.2.3, <2.0.0 - # Note that allowed matches are defined at: - # https://peps.python.org/pep-0440/#compatible-release - # We therefore specify that we require >=1.2.3 AND <2.0 - # Thus at least at the same fix version, but only compatible versions. - # Therefore we use ~=1.2, which equals >=1.2,<2.0, together with >=1.2.3 - FOLDER=$(ls) - sed -i$SEP'' "s|^Requires-Dist: \(.*\) @ \.\./.*|Requires-Dist: \1 (~=$VERSION_MINOR,>=$VERSION)|" "$FOLDER/PKG-INFO" - sed -i$SEP'' "s| @ \.\.[a-zA-Z\-_/]*|~=$VERSION_MINOR,>=$VERSION|" "$FOLDER/setup.py" - sed -i$SEP'' "s|{.*path.*\.\..*|\"~$VERSION\"|" "$FOLDER/pyproject.toml" - tar -czvf new.tar.gz "$FOLDER" - mv new.tar.gz $curdir/$TARFILE -done - -# ===== Updating the WHEEL file ===== -# Handle the tar.gz -cd "$curdir" -WHEELFILES=$(ls dist/*.whl) -for WHEELFILE in $WHEELFILES -do - rm -rf /tmp/version_update - mkdir -p /tmp/version_update - unzip -d /tmp/version_update $curdir/$WHEELFILE - cd /tmp/version_update - # Replace the path dependencies (which are prefixed with '@') - # with compatible version to the current monorepo, but at least at the current one. - # In semver notation: ~1.2.3, which equals >=1.2.3, <2.0.0 - # Note that allowed matches are defined at: - # https://peps.python.org/pep-0440/#compatible-release - # We therefore specify that we require >=1.2.3 AND <2.0 - # Thus at least at the same fix version, but only compatible versions. 
- # Therefore we use ~=1.2, which equals >=1.2,<2.0, together with >=1.2.3 - FOLDER=$(ls -d *.dist-info) - sed -i$SEP'' "s|^Requires-Dist: \(.*\) @ \.\./.*|Requires-Dist: \1 (~=$VERSION_MINOR,>=$VERSION)|" "$FOLDER/METADATA" - zip -r new.whl ./* - mv new.whl "$curdir/$WHEELFILE" - cd "$curdir" - rm -rf /tmp/version_update -done diff --git a/scripts/run_on_each.sh b/scripts/run_on_each.sh deleted file mode 100755 index 2722660..0000000 --- a/scripts/run_on_each.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh -# runs the passed command in each poetry project folder -set -x -set -u -set -e -DIR="$( cd "$( dirname "$0" )" && pwd )" -cd "${DIR}/.." || exit - -# all python packages, in topological order -. ${DIR}/projects.sh -_projects=". ${PROJECTS}" -echo "Running on following projects: ${_projects}" -for p in $_projects -do - cd "${DIR}/../${p}" || exit - echo "==running in ${p}==" - "$@" -done diff --git a/temp.py b/temp.py new file mode 100644 index 0000000..0fcbf9b --- /dev/null +++ b/temp.py @@ -0,0 +1,63 @@ +from gridappsd import GridAPPSD +import os + +os.environ["GRIDAPPSD_USER"] = "system" +os.environ["GRIDAPPSD_PASSWORD"] = "manager" + + +gapps = GridAPPSD() + +topic = 'goss.gridappsd.process.request.simulation' + +request = {"power_system_configs": + [{"SubGeographicalRegion_name": "Medium", + "GeographicalRegion_name": "IEEE", + "Line_name": "C1C3E687-6FFD-C753-582B-632A27E28507", + "simulator_config": {"simulator": "GridLAB-D", + "simulation_output": {}, + "power_flow_solver_method": "NR", + "model_creation_config": + {"load_scaling_factor": 1.0, + "triplex": "y", + "encoding": "u", + "system_frequency": 60, + "voltage_multiplier": 1.0, + "power_unit_conversion": 1.0, + "unique_names": "y", + "z_fraction": 0.0, + "i_fraction": 1.0, + "p_fraction": 0.0, + "randomize_zipload_fractions": False, + "use_houses": False}}}, + {"SubGeographicalRegion_name": "Small", + "GeographicalRegion_name": "IEEE", + "Line_name": "49AD8E07-3BF9-A4E2-CB8F-C3722F837B62", + "simulator_config": {"simulator": "GridLAB-D", + "simulation_output": {}, + "power_flow_solver_method": "NR", + "model_creation_config": + {"load_scaling_factor": 1.0, + "triplex": "y", + "encoding": "u", + "system_frequency": 60, + "voltage_multiplier": 1.0, + "power_unit_conversion": 1.0, + "unique_names": "y", + "z_fraction": 0.0, + "i_fraction": 1.0, + "p_fraction": 0.0, + "randomize_zipload_fractions": False, + "use_houses": False}}}], + "simulation_config": {"duration": 7200, + "start_time": 1758234830, + "run_realtime": True, + "pause_after_measurements": False, + "simulation_broker_port": 5570, + "simulation_broker_location": "127.0.0.1", + "simulation_name":"testing-simulation"}, + "simulation_id": "simulation-id-12345"} + + +response = gapps.get_response(topic, request, timeout=30) + +print(response) diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 4a40889..0000000 --- a/tox.ini +++ /dev/null @@ -1,5 +0,0 @@ -[flake8] -# ignore = D203 -exclude = .git,__pycache__,docs/source/conf.py,old,build,dist, *migrations* -max-complexity = 10 -max-line-length = 120
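
For reference, a minimal sketch of how the reworked simulation configuration from gridappsd/simulation.py might be used once this patch lands. Feeders now go into SimulationConfig.power_system_configs as a list of PowerSystemConfig entries, while runtime options stay in SimulationArgs. The feeder mRID below is the IEEE 13-node id already used in this patch's own examples, and a running GridAPPS-D platform with GRIDAPPSD_USER and GRIDAPPSD_PASSWORD set in the environment is assumed; this is an illustrative sketch, not part of the change set.

from gridappsd import GridAPPSD
from gridappsd.simulation import PowerSystemConfig, Simulation, SimulationArgs, SimulationConfig

# Credentials and broker address are read from the environment, as in the integration test above.
gapps = GridAPPSD()

# One PowerSystemConfig per feeder to simulate; this mRID is the IEEE 13-node feeder.
feeders = [PowerSystemConfig(Line_name="_49AD8E07-3BF9-A4E2-CB8F-C3722F837B62")]

run_config = SimulationConfig(
    power_system_configs=feeders,
    simulation_config=SimulationArgs(duration="120", run_realtime=False),
)

def on_measurement(sim, timestamp, measurements):
    # Called for each measurement message published by the simulation.
    print(timestamp, len(measurements))

def on_complete(sim):
    print("simulation finished")

sim = Simulation(gapps, run_config)
sim.add_onmeasurement_callback(on_measurement)
sim.add_oncomplete_callback(on_complete)
sim.start_simulation()
sim.run_loop()  # blocks until the platform reports the simulation complete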
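
Similarly, a short sketch of the timeseries Query builder whose docstrings are reformatted in gridappsd/timeseries.py. Only the method names (select, where_key, eq, last, execute) come from this patch; the measurement name "simulation" and the field names "measurement_mrid", "value" and "simulation_id" are illustrative assumptions rather than values confirmed here.

from gridappsd import GridAPPSD
from gridappsd.timeseries import Query

gapps = GridAPPSD()

# Assumed measurement and field names; substitute the ones present in your timeseries store.
query = Query("simulation")
query.select("measurement_mrid", "value")  # restrict the fields returned by the query
query.where_key("simulation_id")           # key that the next filter applies to
query.eq("12345")                          # keep rows where simulation_id equals "12345"
query.last(10)                             # only the latest 10 rows
response = query.execute(gapps)            # sends the REQUEST_TIMESERIES_DATA request and returns the reply
print(response)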