CURA-9830 consolidate polygon classes #646

Workflow file for this run

name: GcodeAnalyzer
on:
  push:
    paths:
      - 'include/**'
      - 'src/**'
      - '.github/workflows/gcodeanalyzer.yml'
    branches:
      - main
  pull_request:
    types: [ opened, reopened, synchronize ]
    paths:
      - 'include/**'
      - 'src/**'
      - '.github/workflows/gcodeanalyzer.yml'
    branches:
      - main
      - 'CURA-*'
      - 'PP-*'
      - 'NP-*'
      - '[0-9]+.[0-9]+'
permissions:
  contents: write
  deployments: write
env:
  CONAN_LOGIN_USERNAME: ${{ secrets.CONAN_USER }}
  CONAN_PASSWORD: ${{ secrets.CONAN_PASS }}
# TODO: Make cura-workflows/benchmark.yml generic enough to fit the gcodeanalyzer benchmark
jobs:
  check_actor:
    uses: ultimaker/cura-workflows/.github/workflows/check-actor.yml@main
    secrets: inherit
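  # check_actor exposes a `proceed` output (presumably gating runs triggered by untrusted
  # actors); the rest of the workflow only runs when it evaluates to 'true'.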
  conan-recipe-version:
    needs: [ check_actor ]
    if: ${{ needs.check_actor.outputs.proceed == 'true' }}
    uses: ultimaker/cura-workflows/.github/workflows/conan-recipe-version.yml@main
    with:
      project_name: curaengine
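  # The recipe_id_full output of conan-recipe-version is consumed by the `conan install`
  # step of the gcodeanalyzer job below.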
  gcodeanalyzer:
    needs: [ conan-recipe-version ]
    name: Run GCodeAnalyzer on the engine
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
        with:
          path: 'CuraEngine'
          fetch-depth: 1
          ref: ${{ github.head_ref }}
      - name: Checkout repo PR
        uses: actions/checkout@v4
        if: ${{ github.event.pull_request.head.repo.full_name != github.repository }}
        with:
          path: 'CuraEngine'
          fetch-depth: 1
          ref: ${{ github.base_ref }}
      - name: Checkout GCodeAnalyzer
        uses: actions/checkout@v3
        with:
          repository: 'Ultimaker/GCodeAnalyzer'
          ref: 'main'
          path: 'GCodeAnalyzer'
          fetch-depth: 1
          token: ${{ secrets.CURA_BENCHMARK_PAT }}
      - name: Checkout Test Models
        uses: actions/checkout@v3
        with:
          repository: 'Ultimaker/NightlyTestModels'
          ref: 'main'
          path: 'NightlyTestModels'
          fetch-depth: 1
          token: ${{ secrets.GITHUB_TOKEN }}
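      # Slicing uses printer definitions from the Cura repository. If Cura has a branch with
      # the same name as this PR's branch, check that out; otherwise fall back to main.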
      - name: Determine the corresponding Cura branch
        id: curabranch
        run: |
          status_code=$(curl -s -o /dev/null -w "%{http_code}" "https://api.github.com/repos/ultimaker/cura/branches/${{ github.head_ref }}")
          if [ "$status_code" -eq 200 ]; then
            echo "The branch exists in Cura"
            echo "branch=${{ github.head_ref }}" >> $GITHUB_OUTPUT
          else
            echo "branch=main" >> $GITHUB_OUTPUT
          fi
      - name: Checkout Cura
        uses: actions/checkout@v3
        with:
          repository: 'Ultimaker/Cura'
          ref: ${{ steps.curabranch.outputs.branch }}
          path: 'Cura'
          fetch-depth: 1
          sparse-checkout: |
            resources/definitions
            resources/extruders
      - name: Sync pip requirements
        run: curl -O https://raw.githubusercontent.com/Ultimaker/cura-workflows/main/.github/workflows/requirements-runner.txt
        working-directory: CuraEngine/.github/workflows
      - name: Setup Python and pip
        uses: actions/setup-python@v4
        with:
          python-version: 3.11.x
          cache: pip
          cache-dependency-path: CuraEngine/.github/workflows/requirements-runner.txt
      - name: Install Python requirements
        run: |
          pip install -r CuraEngine/.github/workflows/requirements-runner.txt
          pip install wheel numpy pandas python-dateutil pytz six
          pip install git+https://github.com/ultimaker/libcharon@CURA-9495_analyzer_requisites#egg=charon
          pip install pytest pytest-benchmark
      - name: Install Linux system requirements for building
        run: |
          mkdir runner_scripts
          cd runner_scripts
          curl -O https://raw.githubusercontent.com/Ultimaker/cura-workflows/main/runner_scripts/ubuntu_setup.sh
          chmod +x ubuntu_setup.sh
          sudo ./ubuntu_setup.sh
      - name: Setup pipeline caches
        run: |
          mkdir -p /home/runner/.conan/downloads
          mkdir -p /home/runner/.conan/data
      - name: Create default Conan profile
        run: conan profile new default --detect
      - name: Get Conan configuration
        run: |
          conan config install https://github.com/Ultimaker/conan-config.git
          conan config install https://github.com/Ultimaker/conan-config.git -a "-b runner/${{ runner.os }}/${{ runner.arch }}"
      - name: Cache Conan packages
        uses: actions/cache@v3
        with:
          path: /home/runner/.conan/data
          key: ${{ runner.os }}-conan-data-${{ github.run_id }}
          restore-keys: |
            ${{ runner.os }}-conan-data-
      - name: Cache Conan downloads
        uses: actions/cache@v3
        with:
          path: /home/runner/.conan/downloads
          key: ${{ runner.os }}-conan-downloads-${{ github.run_id }}
          restore-keys: |
            ${{ runner.os }}-conan-downloads-
      - name: Install dependencies
        run: conan install . ${{ needs.conan-recipe-version.outputs.recipe_id_full }} -s build_type=Release --build=missing --update -g GitHubActionsRunEnv -g GitHubActionsBuildEnv -c tools.build:skip_test=True
        working-directory: CuraEngine
      - name: Set Environment variables from Conan install (bash)
        if: ${{ runner.os != 'Windows' }}
        run: |
          . ./activate_github_actions_runenv.sh
          . ./activate_github_actions_buildenv.sh
          echo "CURA_ENGINE_SEARCH_PATH=$GITHUB_WORKSPACE/Cura/resources/definitions:$GITHUB_WORKSPACE/Cura/resources/extruders" >> $GITHUB_ENV
        working-directory: CuraEngine/build/Release/generators
      - name: Build CuraEngine and tests
        run: |
          cmake --preset release
          cmake --build --preset release
        working-directory: CuraEngine
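      # Each model is sliced with the UltiMaker S3 definition; the shell `time` output goes to a
      # per-model .time file via stderr, which the analyzer step below parses for slicing time.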
      - name: Collect STL-files, run CuraEngine, output GCode-files
        run: |
          for file in `ls ../NightlyTestModels/*.stl`;
          do
            ( time ./build/Release/CuraEngine slice --force-read-parent --force-read-nondefault -v -p -j ../Cura/resources/definitions/ultimaker_s3.def.json -l $file -o ../`basename $file .stl`.gcode ) 2> ../`basename $file .stl`.time
          done
        working-directory: CuraEngine
      # TODO: Move this to GCodeAnalyzer
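      # Microsegment heuristic used in the script below (inferred from the constants queue=16 and
      # seg_sec=80): a window of 16 consecutive extrusion moves that completes in less than 16/80 s
      # is counted as a violation, i.e. segments arrive faster than the planner queue can keep up.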
      - name: Run GCodeAnalyzer on generated GCode files
        id: gcode_out
        run: |
          import sys
          import os
          import json
          import re
          from Charon.filetypes.GCodeFile import GCodeFile
          sys.path.append(".")
          import GCodeAnalyzer
          GCodeFile.SkipHeaderValidation = True
          folder_path = ".."
          jzon = []
          for filename in os.listdir(folder_path):
              basename = os.path.basename(filename)
              _base, ext = os.path.splitext(basename)
              if ext.lower() != ".gcode":
                  continue
              infilename = os.path.join(folder_path, filename)
              with open(infilename.rsplit(".", 1)[0] + ".time", "r") as f:
                  match = re.match(r"^.*?real\s+(\d+)m(\d+\.\d+)s", f.read())
                  # slicing wall-clock time in seconds, from the `real` line of the shell `time` output
                  timer = 0 if not match else float(match[1]) * 60 + float(match[2])
              frame = GCodeAnalyzer.DataFrame(infilename)
              line_lengths = frame.gc.extrusions['length'].describe()
              all_lengths = frame.gc.travels['length'].describe()
              extrusion_values = frame.gc.extrusions['E'].describe()
              temperatures = frame['T_0_nozzle'].describe()
              print_time = frame.time.max()
              no_retractions = len(frame.gc.retractions)
              total_travel_length = frame.gc.travels.length.sum()
              # microsegments violations
              queue = 16
              seg_sec = 80
              violation_threshold = queue / seg_sec
              microsegments_wall_skin = frame.gc.extrusions.duration[(frame.type == 'WALL-OUTER') | (frame.type == 'WALL-INNER') | (frame.type == 'SKIN')].rolling(queue).sum()
              no_violations_wall_skin = microsegments_wall_skin[microsegments_wall_skin < violation_threshold].count()
              microsegments_infill = frame.gc.extrusions.duration[frame.type == 'FILL'].rolling(queue).sum()
              no_violations_infill = microsegments_infill[microsegments_infill < violation_threshold].count()
              jzon += [
                  {
                      "name": f"Print time {basename}",
                      "unit": "s",
                      "value": print_time,
                  },
                  {
                      "name": f"Microsegment violations in wall-skin {basename}",
                      "unit": "-",
                      "value": int(no_violations_wall_skin),
                  },
                  {
                      "name": f"Microsegment violations in infill {basename}",
                      "unit": "-",
                      "value": int(no_violations_infill),
                  },
                  {
                      "name": f"Number of retractions {basename}",
                      "unit": "-",
                      "value": no_retractions,
                  },
                  {
                      "name": f"Total travel length {basename}",
                      "unit": "mm",
                      "value": total_travel_length,
                  },
                  {
                      "name": f"Minimum Line Length {basename}",
                      "unit": "mm",
                      "value": line_lengths["min"],
                  },
                  {
                      "name": f"Line Lengths 25 Percentile {basename}",
                      "unit": "mm",
                      "value": line_lengths["25%"],
                  },
                  {
                      "name": f"Minimum All Distances {basename}",
                      "unit": "mm",
                      "value": all_lengths["min"],
                  },
                  {
                      "name": f"All Distances 25 Percentile {basename}",
                      "unit": "mm",
                      "value": all_lengths["25%"],
                  },
                  {
                      "name": f"Extrusion-Axis {basename}",
                      "unit": "mm",
                      "value": extrusion_values["min"],
                  },
                  {
                      "name": f"Extrusion Lengths 25 Percentile {basename}",
                      "unit": "mm",
                      "value": extrusion_values["25%"],
                  },
                  {
                      "name": f"Number Of Temperature Commands {basename}",
                      "unit": "#",
                      "value": temperatures["count"],
                  },
                  {
                      "name": f"Mean Temperature {basename}",
                      "unit": "Celsius",
                      "value": temperatures["50%"],
                  },
                  {
                      "name": f"Slicing time {basename}",
                      "unit": "s",
                      "value": timer,
                  },
              ]
          with open("../output.json", "w") as outfile:
              outfile.write(json.dumps(jzon))
        shell: python
        working-directory: GCodeAnalyzer
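      # output.json holds a JSON array of {"name", "unit", "value"} entries, the custom format
      # consumed by github-action-benchmark's customBiggerIsBetter tool in the next step.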
      - name: Store benchmark result
        uses: benchmark-action/github-action-benchmark@v1
        with:
          name: CGcodeAnalyzer
          output-file-path: output.json
          gh-repository: github.com/Ultimaker/CuraEngineBenchmarks
          gh-pages-branch: main
          benchmark-data-dir-path: dev/gcodeanalyzer
          tool: customBiggerIsBetter
          github-token: ${{ secrets.CURA_BENCHMARK_PAT }}
          auto-push: true
          max-items-in-chart: 250