Create boilerplate template from example #1

Merged
AllSpiceAlice merged 11 commits from dd/dev into main 2024-07-22 01:14:25 +00:00
10 changed files with 328 additions and 291 deletions

View File

@@ -0,0 +1 @@
# Mock config yaml file

View File

@@ -0,0 +1 @@
Mock input file

View File

@@ -0,0 +1,22 @@
name: Example AllSpice Add-on Template
on: [push, pull_request]
jobs:
  hardware-devops:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Run Hardware DevOps Action
        uses: https://hub.allspice.io/AllSpice-Demos/Add-on-template@v0.1
        with:
          source_path: ".allspice/examples/input.txt"
          output_file_name: "output.txt"
          config_file: ".allspice/examples/config.yml"
          task_type: "Schematic-Review"
          additional_params: '{"SCH_VER":"3"}'
        env:
          ALLSPICE_TOKEN: ${{ allspice.token }}

Add-on-script.py Executable file
View File

@@ -0,0 +1,90 @@
#!/usr/bin/env python3
import argparse
import os
import sys
import json
from allspice import AllSpice


def hello_world(task_type, source_file, output_file, additional_params):
    print(f"Hello, World! Performing task: {task_type}")
    print(f"Source file: {source_file}")
    print(f"Output file: {output_file}")
    print(f"Additional parameters: {additional_params}")
    # Parse additional_params
    params = json.loads(additional_params)
    sch_ver = params.get("SCH_VER", "3")
    print(f"Schematic version: {sch_ver}")


def test_allspice_connection(allspice_hub_url, auth_token):
    try:
        allspice = AllSpice(token_text=auth_token, allspice_hub_url=allspice_hub_url)
        print("AllSpice Version: " + allspice.get_version())
        print("API-Token belongs to user: " + allspice.get_user().username)
    except Exception as e:
        print(f"Error connecting to AllSpice API: {e}")
        sys.exit(1)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        prog="hardware_devops_action",
        description="Perform hardware development tasks such as Schematic-Review, PCB-Review, ECO-Review, and Release."
    )
    parser.add_argument(
        "--source_file",
        help="The path to the source file used for the task. Example: 'Archimajor.PrjPcb', 'Schematics/Beagleplay.dsn'."
    )
    parser.add_argument(
        "--task_type",
        help="The type of hardware task to perform. Options include 'Schematic-Review', 'PCB-Review', 'ECO-Review', 'Release'.",
        default="Schematic-Review",
    )
    parser.add_argument(
        "--source_ref",
        help="The git reference the task should be performed for (eg. branch name, tag name, commit SHA).",
        default="main",
    )
    parser.add_argument(
        "--server_url",
        help="The URL of your AllSpice Hub server instance.",
    )
    parser.add_argument(
        "--output_file",
        help="The path to the output file. If absent, the output will be printed to the command line.",
    )
    parser.add_argument(
        "--additional_params",
        help="Any additional parameters required for the task, provided as a JSON string.",
        default="{}",
    )
    parser.add_argument(
        "--allspice_token",
        help="Your AllSpice application token. Generate a token: https://hub.allspice.io/user/settings/applications",
    )
    parser.add_argument(
        "--config_file",
        help="Path to the config file.",
    )
    parser.add_argument(
        "--input_file",
        help="Path to the input file.",
    )
    args = parser.parse_args()

    auth_token = os.environ.get("ALLSPICE_TOKEN") or args.allspice_token
    if auth_token is None:
        print("Please set the environment variable ALLSPICE_TOKEN or supply a token with --allspice_token <your_token>. Generate a token: https://hub.allspice.io/user/settings/applications")
        exit(1)

    # Test connection to AllSpice API
    test_allspice_connection(args.server_url, auth_token)

    # Perform the Hello World task
    hello_world(
        task_type=args.task_type,
        source_file=args.source_file,
        output_file=args.output_file,
        additional_params=args.additional_params,
    )

View File

@@ -1,8 +1,8 @@
FROM python:3.12-bookworm
COPY requirements.txt /requirements.txt
COPY entrypoint.py /entrypoint.py
COPY Add-on-script.py /Add-on-script.py
RUN pip install -r /requirements.txt
ENTRYPOINT [ "/entrypoint.py" ]
ENTRYPOINT [ "/Add-on-script.py" ]

README.md
View File

@@ -1,141 +1,188 @@
# Generate BOM for E-CAD Projects
# AllSpice Actions Add-on template
Generate a BOM output file for an Altium project on AllSpice Hub using [AllSpice Actions](https://learn.allspice.io/docs/actions-cicd).
This Add-on template shows you how to create an Add-on and how to set up the files that define the API for passing inputs to it.
## Table of Contents
- [Usage](#usage)
- [Inputs](#inputs)
- [Outputs](#outputs)
- [Example Workflow](#example-workflow)
- [License](#license)
## Usage
Add the following steps to your actions:
### Calling this add-on
This Add-on can be called from an external repository workflow file.
There is an example workflow file in this repository:
[.allspice/workflows/add-on-workflow-example.yml](.allspice/workflows/add-on-workflow-example.yml)
You can copy this workflow file to another repository and use it to call this Add-on template.
### Define API in action.yml
The file [action.yml](action.yml) describes how to connect your workflow's call of the Add-on to the actual script and specifies how parameters are used.
This is considered an API contract.
Below is the `action.yml` file for this repo. The `inputs` section maps workflow inputs to input variables.
The `runs` section composes the command-line args from those inputs and other context values and passes them to the Docker container.
```yaml
name: "Hardware DevOps Action"
description: >
  A generic AllSpice Action Add-on for hardware development tasks such as schematic review,
  PCB review, ECO review, and release. This action demonstrates defining parameters
  for these tasks and utilizing GitHub context information.
inputs:
  source_file:
    description: >
      Path to the source file or directory from the root of the repo. For example,
      the path to a schematic or PCB file.
    required: true
  output_file_name:
    description: "Name of the output file"
    required: true
    default: "output.txt"
  config_file:
    description: >
      Path to a configuration file for the task.
    required: true
  task_type:
    description: >
      The type of hardware task to perform. Options include 'Schematic-Review',
      'PCB-Review', 'ECO-Review', 'Release'.
    default: "Schematic-Review"
  additional_params:
    description: >
      Any additional parameters required for the task, provided as a JSON string.
    default: "{}"
runs:
  using: "docker"
  image: "Dockerfile"
  args:
    - "--source_file"
    - "${{ inputs.source_file }}"
    - "--output_file"
    - "${{ github.workspace }}/${{ inputs.output_file_name }}"
    - "--config_file"
    - ${{ inputs.config_file }}
    - "--task_type"
    - ${{ inputs.task_type }}
    - "--additional_params"
    - ${{ inputs.additional_params }}
    - "--source_ref"
    - ${{ allspice.sha }}
    - "--server_url"
    - ${{ allspice.server_url }}
    - "--allspice_token"
    - ${{ secrets.ALLSPICE_TOKEN }}
  env:
    GITHUB_TOKEN: ${{ github.token }}
```
### Define Add-on script
[`Add-on-script.py`](Add-on-script.py)
This is the program that you will use to perform your Add-on. In this case, we use Python and the py-allspice API wrapper.
The first part of the program parses the arguments defined by the API contract, and the second part runs your actual Add-on. In this template example, the Python script performs a connection test to AllSpice and displays the parameters passed from the calling workflow file.
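If you are building your own Add-on from this template, the overall shape of the script is roughly the sketch below (a simplified outline using the same argument names as `action.yml`; the `run_task` helper is a hypothetical placeholder, not part of this repo):
```python
#!/usr/bin/env python3
# Minimal outline of an Add-on script: parse the args declared in action.yml,
# then hand them to your own task logic. run_task is a placeholder name.
import argparse
import json
import os
import sys


def run_task(task_type, source_file, output_file, params):
    # Replace this with your real Add-on logic.
    print(f"Running {task_type} on {source_file} -> {output_file} with {params}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog="my_addon")
    parser.add_argument("--source_file")
    parser.add_argument("--task_type", default="Schematic-Review")
    parser.add_argument("--output_file")
    parser.add_argument("--additional_params", default="{}")
    parser.add_argument("--allspice_token")
    args = parser.parse_args()

    token = os.environ.get("ALLSPICE_TOKEN") or args.allspice_token
    if token is None:
        sys.exit("Set ALLSPICE_TOKEN or pass --allspice_token")

    run_task(args.task_type, args.source_file, args.output_file,
             json.loads(args.additional_params))
```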
### Define Dockerfile
`Dockerfile`
The Dockerfile specifies how to set up the environment and what file to run as the Add-on script.
In this repo template, we load Python 3.12, install the modules listed in requirements.txt, and then run Add-on-script.py.
```Dockerfile
FROM python:3.12-bookworm
COPY requirements.txt /requirements.txt
COPY Add-on-script.py /Add-on-script.py
RUN pip install -r /requirements.txt
ENTRYPOINT [ "/Add-on-script.py" ]
```
### requirements.txt
The requirements.txt file specifies which Python modules to install and which versions to use, one `module-name==version` entry per line.
- `py-allspice`: AllSpice's native Python wrapper for the AllSpice API.
- `pyyaml`: a YAML processor, used here to help parse workflow .yml files.
```
py-allspice==3.3.0
pyyaml~=6.0
```
### Testing files
This repo has an optional Actions workflow that checks the syntax of Add-on-script.py. This is helpful because Python is an interpreted language, so syntax errors only surface at run time.
You do not need these files to run your Add-on; however, using tests will help you spot errors early (a minimal local check is sketched after this list).
- `.allspice/dependabot.yml` - Instructions for repository workflow tests.
- `.allspice/workflows/test.yml` - Workflow to test this repo on design review.
  - Lints against 3 different versions of Python (checks syntax)
- `pyproject.toml` - Linter setup. Specifies how the repository workflow tests will check the syntax of this repository.
- `requirements-test.txt` - The requirements for the test workflow.
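As a rough local stand-in for that syntax check (an assumption about what the lint step verifies, not the workflow's exact commands), you can byte-compile the script before pushing:
```python
# Compile Add-on-script.py without running it; a syntax error raises
# py_compile.PyCompileError, the class of mistake the test workflow catches.
import py_compile

py_compile.compile("Add-on-script.py", doraise=True)
print("Syntax OK")
```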
## Add-on input API
You can customize your Add-on inputs to match your own workflow. These are some example inputs that are helpful for running Add-ons.
You can have as many or as few inputs as you need for your workflow.
- `source_file`: The path to the source file used for the task. Example: `Archimajor.PrjPcb`, `Schematics/Beagleplay.dsn`.
- `task_type`: The type of hardware task to perform. Options include `Schematic-Review`, `PCB-Review`, `ECO-Review`, `Release`. (default: `Schematic-Review`)
- `source_ref`: The git reference the task should be performed for (e.g., branch name, tag name, commit SHA). (default: `main`)
- `server_url`: The URL of your AllSpice server instance.
- `output_file`: The path to the output file. If absent, the output will be printed to the command line.
- `additional_params`: Any additional parameters required for the task, provided as a JSON string. (default: `{}`)
- `allspice_token`: Your AllSpice application token. Generate a token at: https://hub.allspice.io/user/settings/applications
- `config_file`: Path to the config file.
- `input_file`: Path to the input file.
## Outputs
The outputs depend on the task performed and will be printed to the command line or saved to the specified `output_file`.
In this template repo there are no actual outputs; however, the output file name is displayed to demonstrate that the inputs were passed to the script correctly.
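A minimal sketch of how an Add-on script might honor the `output_file` argument (the `write_output` helper is hypothetical, not part of this template):
```python
import sys


def write_output(text, output_file=None):
    """Write results to output_file if given, otherwise print them to the command line."""
    if output_file:
        with open(output_file, "w") as f:
            f.write(text)
    else:
        sys.stdout.write(text)


write_output("example result\n", output_file=None)  # falls back to the command line
```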
## Example Workflow
Here is the file [.allspice/workflows/add-on-workflow-example.yml](.allspice/workflows/add-on-workflow-example.yml).
This shows how to call the Add-on in this repo.
```yaml
# Checkout is only needed if columns.yml is committed in your Altium project repo.
- name: Checkout
uses: actions/checkout@v3
name: Example AllSpice Add-on Template
- name: Generate BOM
uses: https://hub.allspice.io/Actions/generate-bom@v0.4
on: [push, pull_request]
jobs:
hardware-devops:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Run Hardware DevOps Action
uses: https://hub.allspice.io/AllSpice-Demos/Add-on-template@v0.1
with:
# The path to the project file in your repo (.PrjPcb for Altium, .DSN for OrCad).
source_path: Archimajor.PrjPcb
# [optional] A path to a YAML file mapping columns to the component
# attributes they are from.
# Default: 'columns.yml'
columns: .allspice/columns.yml
# [optional] The path to the output file that will be generated.
# Default: 'bom.csv'
output_file_name: bom.csv
# [optional] A comma-separated list of columns to group the BOM by. If empty
# or not present, the BOM will be flat.
# Default: ''
group_by: "Part ID"
# [optional] The variant of the project to generate the BOM for. If empty
# or not present, the BOM will be generated for the default variant.
# Default: ''
variant: ""
source_path: ".allspice/examples/input.txt"
output_file_name: "output.txt"
config_file: ".allspice/examples/config.yml"
task_type: "Schematic-Review"
additional_params: '{"SCH_VER":"3"}'
env:
ALLSPICE_TOKEN: ${{ allspice.token }}
```
### Customizing the Attributes Extracted by the BOM Script
This script relies on a YAML file to specify the columns in the BOM and which
attributes or properties of the components they are populated from. This file is
typically called `columns.yml` and can be checked into your repo. To learn more
about YAML, [check out the AllSpice Knowledge Base.](https://learn.allspice.io/docs/yaml)
The format of this YAML file is as follows:
```yml
columns:
  - name: "Manufacturer"
    part_attributes:
      - "Manufacturer"
      - "MANUFACTURER"
  - name: "Part Number"
    part_attributes:
      - "PART"
      - "MANUFACTURER #"
      - "_part_id"
  - name: "Designator"
    part_attributes: "Designator"
  - name: "Description"
    part_attributes:
      - "PART DESCRIPTION"
      - "_description"
```
First, you have the key `columns:` which is mapped to a list. Each element of
the list has two key/value pairs. The first is `name`, which will be the column
name in the output file. Next, you have `part_attributes`. This can either be
just a string (like in the case of the `Designator` column) or a list of strings
(like in the other cases).
If `part_attributes` is a string, that property or attribute of the component
is used as the value for that column. If that property is not present
in a particular part, that column will be blank for that part. If
`part_attributes` is a list, those properties will be checked in the order they
are defined for each part. The _first_ property found is used as the value for
that column in the row for that part. So if both `PART` and `MANUFACTURER #` are
defined, it will use `PART`.
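The lookup described above can be pictured roughly like this (an illustrative sketch, not the actual py-allspice implementation):
```python
def column_value(part_attributes, column_spec):
    """Resolve one BOM column for one part, following the columns.yml rules above."""
    if isinstance(column_spec, str):
        # A single attribute name: use it, or leave the column blank.
        return part_attributes.get(column_spec, "")
    for attr in column_spec:
        # A list: the first attribute present on the part wins.
        if attr in part_attributes:
            return part_attributes[attr]
    return ""


# With both "PART" and "MANUFACTURER #" defined, "PART" is used:
print(column_value({"PART": "LM358", "MANUFACTURER #": "LM358N"}, ["PART", "MANUFACTURER #"]))
```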
An example for OrCad `columns.yml` file content is:
```yml
columns:
  - name: "Part Number"
    part_attributes:
      - "Part Number"
      - "_name"
  - name: "Designator"
    part_attributes: "Part Reference"
  - name: "Type"
    part_attributes: "Part Type"
  - name: "Value"
    part_attributes: "Value"
```
By default, the action will pick up a `columns.yml` file from the working
directory. If you want to keep it in a different place or rename it, you can
pass the `--columns` argument to the step in the workflow to specify where it
is.
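Under the hood, the columns file is read into a simple name-to-attributes mapping, roughly like this (a sketch of the behavior, assuming the default `columns.yml` path):
```python
import yaml

# Parse columns.yml into {"Column name": <attribute or list of attributes>, ...}
with open("columns.yml") as f:
    columns_data = yaml.safe_load(f)

columns = {c["name"]: c["part_attributes"] for c in columns_data["columns"]}
```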
### Py-allspice injected attributes
Note that py-allspice also adds a few static attributes, which are taken from
the part itself, and not from the properties or attributes. For Altium projects,
`_part_id` and `_description` are available, which correspond to the Library
Reference and Description fields of the component. For OrCAD projects, `_name`
is available, which corresponds to the name of the component.
The underscore is added ahead of the name to prevent these additional attributes
from overriding any of your own.
## Group By
You can also group lines by a column value. The most common is `_part_id`. You
can combine this with the columns YAML example above, like so:
```yaml
- name: Generate BOM
  uses: https://hub.allspice.io/Actions/generate-bom@v0.4
  with:
    project_path: Archimajor.PrjPcb
    columns: .allspice/columns.yml
    group_by: "Part ID"
```
This will generate a BOM in which components with matching Part IDs are grouped onto a single row.
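Conceptually, the grouping behaves something like the sketch below (an illustration of the described behavior, not the action's actual code; the added quantity column is an assumption):
```python
from collections import defaultdict


def group_bom(rows, key="Part ID"):
    """Collapse rows that share the same value in `key` into one row each."""
    groups = defaultdict(list)
    for row in rows:
        groups[row.get(key, "")].append(row)
    # Keep the first row of each group and record how many rows it absorbed.
    return [dict(group[0], Quantity=len(group)) for group in groups.values()]


rows = [{"Part ID": "R-100K", "Designator": "R1"}, {"Part ID": "R-100K", "Designator": "R2"}]
print(group_bom(rows))  # one grouped line with Quantity=2
```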
## Variants
To generate the BOM for a variant of the project, pass the `--variant` argument
to the script. For example:
```yaml
- name: Generate BOM
  uses: https://hub.allspice.io/Actions/generate-bom@v0.4
  with:
    project_path: Archimajor.PrjPcb
    columns: .allspice/columns.yml
    output_file_name: bom-lite.csv
    variant: "LITE"
```
When no variant is given, the BOM is generated without considering any variants.

View File

@@ -1,53 +1,52 @@
name: "Generate BOM"
name: "Hardware DevOps Action"
description: >
Generate a BOM for the project using py-allspice and attach it as an artifact
to the run.
Works for Altium and OrCAD projects.
A generic AllSpice Action Add-on for hardware development tasks such as schematic review,
PCB review, ECO review, and release. This action demonstrates defining parameters
for these tasks and utilizing GitHub context information.
inputs:
source_path:
source_file:
description: >
Path to the source file from the root of the repo. For Altium projects,
this should be the path to the .PrjPcb file. For OrCAD projects, this
should be the path to the .dsn file.
Path to the source file or directory from the root of the repo. For example,
the path to a schematic or PCB file.
required: true
output_file_name:
description: "Name of the output file"
required: true
default: "bom.csv"
columns:
default: "output.txt"
config_file:
description: >
A path to a JSON file mapping columns to the attributes they are from.
Path to a configuration file for the task.
required: true
group_by:
task_type:
description: >
A comma-separated list of columns to group the BOM by. If not present, the
BOM will be flat.
default: ""
variant:
The type of hardware task to perform. Options include 'Schematic-Review',
'PCB-Review', 'ECO-Review', 'Release'.
default: "Schematic-Review"
additional_params:
description: >
The variant of the project to generate the BOM for. If not present, the
BOM will be generated for the default variant. Not supported for OrCAD
projects.
default: ""
Any additional parameters required for the task, provided as a JSON string.
default: "{}"
runs:
using: "docker"
image: "Dockerfile"
args:
- "--source_ref"
- ${{ github.sha }}
- "--allspice_hub_url"
- ${{ github.server_url }}
- "--columns"
- ${{ inputs.columns }}
- "--group_by"
- ${{ inputs.group_by }}
- "--variant"
- ${{ inputs.variant }}
- "--source_file"
- "${{ inputs.source_file}}"
- "--output_file"
- "${{ github.workspace}}/${{ inputs.output_file_name }}"
- ${{ github.repository }}
- ${{ inputs.source_path }}
env:
ALLSPICE_AUTH_TOKEN: ${{ github.token }}
- "--config_file"
- ${{ inputs.config_file }}
- "--task_type"
- ${{ inputs.task_type }}
- "--additional_params"
- ${{ inputs.additional_params }}
- "--source_ref"
- ${{ allspice.sha }}
- "--server_url"
- ${{ allspice.server_url }}
- "--allspice_token"
- ${{ secrets.ALLSPICE_TOKEN }}
env:
GITHUB_TOKEN: ${{ github.token }}

View File

@@ -1,123 +0,0 @@
#! /usr/bin/env python3

# Generate a BOM from a PrjPcb file.
# For more information, read the README file in this directory.

import argparse
import csv
import os
import yaml
import sys
from contextlib import ExitStack

from allspice import AllSpice
from allspice.utils.bom_generation import generate_bom

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        prog="generate_bom", description="Generate a BOM from a project repository."
    )
    parser.add_argument(
        "repository", help="The repo containing the project in the form 'owner/repo'"
    )
    parser.add_argument(
        "source_file",
        help=(
            "The path to the source file used to generate the BOM. If this is an Altium project, "
            "this should be the .PrjPcb file. For an OrCAD project, this should be the .dsn file. "
            "Example: 'Archimajor.PrjPcb', 'Schematics/Beagleplay.dsn'."
        ),
    )
    parser.add_argument(
        "--columns",
        help=(
            "A path to a YAML file mapping columns to the attributes they are from. See the README "
            "for more details. Defaults to 'columns.yml'."
        ),
        default="columns.yml",
    )
    parser.add_argument(
        "--source_ref",
        help=(
            "The git reference the BOM should be generated for (eg. branch name, tag name, commit "
            "SHA). Defaults to the main branch."
        ),
        default="main",
    )
    parser.add_argument(
        "--allspice_hub_url",
        help="The URL of your AllSpice Hub instance. Defaults to https://hub.allspice.io.",
    )
    parser.add_argument(
        "--output_file",
        help="The path to the output file. If absent, the CSV will be output to the command line.",
    )
    parser.add_argument(
        "--group_by",
        help=(
            "A comma-separated list of columns to group the BOM by. If not present, the BOM will "
            "be flat."
        ),
    )
    parser.add_argument(
        "--variant",
        help=(
            "The variant of the project to generate the BOM for. If not present, the BOM will be "
            "generated for the default variant. This is not used for OrCAD projects."
        ),
    )
    args = parser.parse_args()

    columns_file = args.columns
    columns = {}
    try:
        with open(columns_file, "r") as f:
            columns_data = yaml.safe_load(f.read())
            for column_value in columns_data["columns"]:
                columns[column_value["name"]] = column_value["part_attributes"]
    except KeyError as e:
        print(f"Error: columns file {columns_file} does not seem to be in the right format.")
        print("Please refer to the README for more information.")
        print(f"Caused by: {e}")
        sys.exit(1)

    auth_token = os.environ.get("ALLSPICE_AUTH_TOKEN")
    if auth_token is None:
        print("Please set the environment variable ALLSPICE_AUTH_TOKEN")
        exit(1)

    if args.allspice_hub_url is None:
        allspice = AllSpice(token_text=auth_token)
    else:
        allspice = AllSpice(token_text=auth_token, allspice_hub_url=args.allspice_hub_url)

    repo_owner, repo_name = args.repository.split("/")
    repository = allspice.get_repository(repo_owner, repo_name)
    group_by = args.group_by.split(",") if args.group_by else None

    print("Generating BOM...", file=sys.stderr)
    bom_rows = generate_bom(
        allspice,
        repository,
        args.source_file,
        columns,
        group_by=group_by,
        ref=args.source_ref if args.source_ref else "main",
        variant=args.variant if args.variant else None,
    )

    with ExitStack() as stack:
        keys = bom_rows[0].keys()
        if args.output_file is not None:
            f = stack.enter_context(open(args.output_file, "w"))
            writer = csv.DictWriter(f, fieldnames=keys)
        else:
            writer = csv.DictWriter(sys.stdout, fieldnames=keys)
        writer.writeheader()
        writer.writerows(bom_rows)

    print("Generated bom.", file=sys.stderr)