Compare commits
No commits in common. "ed021e3d815387b119c74d6bebc49b77cd523871" and "5d040061f41b77d8149ef0d37acde0a4be716059" have entirely different histories.
ed021e3d81
...
5d040061f4
28 changed files with 35 additions and 557 deletions
|
|
@ -2,7 +2,7 @@
|
|||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from masque import layer_t, Pattern, Circle, Arc, Ref
|
||||
from masque import layer_t, Pattern, Circle, Arc, Polygon, Ref
|
||||
from masque.repetition import Grid
|
||||
import masque.file.gdsii
|
||||
|
||||
|
|
|
|||
|
|
@ -55,7 +55,6 @@ from .pattern import (
|
|||
map_targets as map_targets,
|
||||
chain_elements as chain_elements,
|
||||
)
|
||||
from .utils.boolean import boolean as boolean
|
||||
|
||||
from .library import (
|
||||
ILibraryView as ILibraryView,
|
||||
|
|
|
|||
|
|
@ -311,7 +311,6 @@ class Pather(Builder, PatherMixin):
|
|||
# Fallback for dead pather: manually update the port instead of plugging
|
||||
port = self.pattern[portspec]
|
||||
port_rot = port.rotation
|
||||
assert port_rot is not None
|
||||
if ccw is None:
|
||||
out_rot = pi
|
||||
elif bool(ccw):
|
||||
|
|
@ -415,7 +414,6 @@ class Pather(Builder, PatherMixin):
|
|||
# Fallback for dead pather: manually update the port instead of plugging
|
||||
port = self.pattern[portspec]
|
||||
port_rot = port.rotation
|
||||
assert port_rot is not None
|
||||
out_port = Port((length, jog), rotation=pi, ptype=in_ptype)
|
||||
out_port.rotate_around((0, 0), pi + port_rot)
|
||||
out_port.translate(port.offset)
|
||||
|
|
|
|||
|
|
@ -520,7 +520,7 @@ class RenderPather(PatherMixin):
|
|||
ccw0 = jog > 0
|
||||
kwargs_no_out = (kwargs | {'out_ptype': None})
|
||||
try:
|
||||
t_port0, _ = tool.planL( ccw0, length / 2, in_ptype=in_ptype, **kwargs_no_out) # TODO length/2 may fail w/asymmetric ptypes
|
||||
t_port0, _ = tool.planL( ccw0, length / 2, in_ptype=in_ptype, **kwargs_no_out) # TODO length/2 may fail with asymmetric ptypes
|
||||
jog0 = Port((0, 0), 0).measure_travel(t_port0)[0][1]
|
||||
t_port1, _ = tool.planL(not ccw0, abs(jog - jog0), in_ptype=t_port0.ptype, **kwargs)
|
||||
jog1 = Port((0, 0), 0).measure_travel(t_port1)[0][1]
|
||||
|
|
|
|||
|
|
@ -643,7 +643,6 @@ class AutoTool(Tool, metaclass=ABCMeta):
|
|||
if out_transition is not None:
|
||||
out_ptype_actual = out_transition.their_port.ptype
|
||||
elif ccw is not None:
|
||||
assert bend is not None
|
||||
out_ptype_actual = bend.out_port.ptype
|
||||
elif not numpy.isclose(straight_length, 0):
|
||||
out_ptype_actual = straight.ptype
|
||||
|
|
|
|||
|
|
@ -502,61 +502,6 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
|
|||
]
|
||||
return polys
|
||||
|
||||
def layer_as_polygons(
|
||||
self,
|
||||
layer: layer_t,
|
||||
flatten: bool = True,
|
||||
library: Mapping[str, 'Pattern'] | None = None,
|
||||
) -> list[Polygon]:
|
||||
"""
|
||||
Collect all geometry effectively on a given layer as a list of polygons.
|
||||
|
||||
If `flatten=True`, it recursively gathers shapes on `layer` from all `self.refs`.
|
||||
`Repetition` objects are expanded, and non-polygon shapes are converted
|
||||
to `Polygon` approximations.
|
||||
|
||||
Args:
|
||||
layer: The layer to collect geometry from.
|
||||
flatten: If `True`, include geometry from referenced patterns.
|
||||
library: Required if `flatten=True` to resolve references.
|
||||
|
||||
Returns:
|
||||
A list of `Polygon` objects.
|
||||
"""
|
||||
if flatten and self.has_refs() and library is None:
|
||||
raise PatternError("Must provide a library to layer_as_polygons() when flatten=True")
|
||||
|
||||
polys: list[Polygon] = []
|
||||
|
||||
# Local shapes
|
||||
for shape in self.shapes.get(layer, []):
|
||||
for p in shape.to_polygons():
|
||||
# expand repetitions
|
||||
if p.repetition is not None:
|
||||
for offset in p.repetition.displacements:
|
||||
polys.append(p.deepcopy().translate(offset).set_repetition(None))
|
||||
else:
|
||||
polys.append(p.deepcopy())
|
||||
|
||||
if flatten and self.has_refs():
|
||||
assert library is not None
|
||||
for target, refs in self.refs.items():
|
||||
if target is None:
|
||||
continue
|
||||
target_pat = library[target]
|
||||
for ref in refs:
|
||||
# Get polygons from target pattern on the same layer
|
||||
ref_polys = target_pat.layer_as_polygons(layer, flatten=True, library=library)
|
||||
# Apply ref transformations
|
||||
for p in ref_polys:
|
||||
p_pat = ref.as_pattern(Pattern(shapes={layer: [p]}))
|
||||
# as_pattern expands repetition of the ref itself
|
||||
# but we need to pull the polygons back out
|
||||
for p_transformed in p_pat.shapes[layer]:
|
||||
polys.append(cast('Polygon', p_transformed))
|
||||
|
||||
return polys
|
||||
|
||||
def referenced_patterns(self) -> set[str | None]:
|
||||
"""
|
||||
Get all pattern namers referenced by this pattern. Non-recursive.
|
||||
|
|
@ -693,7 +638,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
|
|||
"""
|
||||
for entry in chain(chain_elements(self.shapes, self.labels, self.refs), self.ports.values()):
|
||||
cast('Positionable', entry).translate(offset)
|
||||
self._log_bulk_update(f"translate({offset!r})")
|
||||
self._log_bulk_update(f"translate({offset})")
|
||||
return self
|
||||
|
||||
def scale_elements(self, c: float) -> Self:
|
||||
|
|
@ -793,22 +738,18 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
|
|||
cast('Rotatable', entry).rotate(rotation)
|
||||
return self
|
||||
|
||||
def mirror_elements(self, axis: int = 0) -> Self:
|
||||
def mirror_elements(self, across_axis: int = 0) -> Self:
|
||||
"""
|
||||
Mirror each shape, ref, and port relative to its offset.
|
||||
Mirror each shape, ref, and port relative to (0,0).
|
||||
|
||||
Args:
|
||||
axis: Axis to mirror across
|
||||
0: mirror across x axis (flip y),
|
||||
1: mirror across y axis (flip x)
|
||||
across_axis: Axis to mirror across
|
||||
(0: mirror across x axis, 1: mirror across y axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in chain(chain_elements(self.shapes, self.refs), self.ports.values()):
|
||||
cast('Mirrorable', entry).mirror(axis=axis)
|
||||
self._log_bulk_update(f"mirror_elements({axis})")
|
||||
return self
|
||||
return self.flip_across(axis=across_axis)
|
||||
|
||||
def mirror(self, axis: int = 0) -> Self:
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -302,7 +302,9 @@ class PortList(metaclass=ABCMeta):
|
|||
raise PortError(f'Unrenamed ports would be overwritten: {duplicates}')
|
||||
|
||||
for kk, vv in mapping.items():
|
||||
if vv is None or vv != kk:
|
||||
if vv is None:
|
||||
self._log_port_removal(kk)
|
||||
elif vv != kk:
|
||||
self._log_port_removal(kk)
|
||||
|
||||
renamed = {vv: self.ports.pop(kk) for kk, vv in mapping.items()}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
from typing import Any, cast, TYPE_CHECKING, Self, Literal
|
||||
from typing import Any, cast, TYPE_CHECKING, Self
|
||||
import copy
|
||||
import functools
|
||||
|
||||
|
|
@ -462,23 +462,3 @@ class Polygon(Shape):
|
|||
def __repr__(self) -> str:
|
||||
centroid = self.vertices.mean(axis=0)
|
||||
return f'<Polygon centroid {centroid} v{len(self.vertices)}>'
|
||||
|
||||
def boolean(
|
||||
self,
|
||||
other: Any,
|
||||
operation: Literal['union', 'intersection', 'difference', 'xor'] = 'union',
|
||||
scale: float = 1e6,
|
||||
) -> list['Polygon']:
|
||||
"""
|
||||
Perform a boolean operation using this polygon as the subject.
|
||||
|
||||
Args:
|
||||
other: Polygon, Iterable[Polygon], or raw vertices acting as the CLIP.
|
||||
operation: 'union', 'intersection', 'difference', 'xor'.
|
||||
scale: Scaling factor for integer conversion.
|
||||
|
||||
Returns:
|
||||
A list of resulting Polygons.
|
||||
"""
|
||||
from ..utils.boolean import boolean
|
||||
return boolean([self], other, operation=operation, scale=scale)
|
||||
|
|
|
|||
|
|
@ -20,7 +20,6 @@ def test_abstract_transform() -> None:
|
|||
abs_obj.rotate_around((0, 0), pi / 2)
|
||||
# (10, 0) rot 0 -> (0, 10) rot pi/2
|
||||
assert_allclose(abs_obj.ports["A"].offset, [0, 10], atol=1e-10)
|
||||
assert abs_obj.ports["A"].rotation is not None
|
||||
assert_allclose(abs_obj.ports["A"].rotation, pi / 2, atol=1e-10)
|
||||
|
||||
# Mirror across x axis (axis 0): flips y-offset
|
||||
|
|
@ -28,7 +27,6 @@ def test_abstract_transform() -> None:
|
|||
# (0, 10) mirrored(0) -> (0, -10)
|
||||
# rotation pi/2 mirrored(0) -> -pi/2 == 3pi/2
|
||||
assert_allclose(abs_obj.ports["A"].offset, [0, -10], atol=1e-10)
|
||||
assert abs_obj.ports["A"].rotation is not None
|
||||
assert_allclose(abs_obj.ports["A"].rotation, 3 * pi / 2, atol=1e-10)
|
||||
|
||||
|
||||
|
|
@ -50,7 +48,6 @@ def test_abstract_ref_transform() -> None:
|
|||
# (0, 10) -> (100, 110)
|
||||
|
||||
assert_allclose(abs_obj.ports["A"].offset, [100, 110], atol=1e-10)
|
||||
assert abs_obj.ports["A"].rotation is not None
|
||||
assert_allclose(abs_obj.ports["A"].rotation, pi / 2, atol=1e-10)
|
||||
|
||||
|
||||
|
|
@ -60,5 +57,4 @@ def test_abstract_undo_transform() -> None:
|
|||
|
||||
abs_obj.undo_ref_transform(ref)
|
||||
assert_allclose(abs_obj.ports["A"].offset, [10, 0], atol=1e-10)
|
||||
assert abs_obj.ports["A"].rotation is not None
|
||||
assert_allclose(abs_obj.ports["A"].rotation, 0, atol=1e-10)
|
||||
|
|
|
|||
|
|
@ -1,119 +0,0 @@
|
|||
import pytest
|
||||
import numpy
|
||||
from numpy.testing import assert_allclose
|
||||
from masque.pattern import Pattern
|
||||
from masque.shapes.polygon import Polygon
|
||||
from masque.repetition import Grid
|
||||
from masque.library import Library
|
||||
|
||||
def test_layer_as_polygons_basic() -> None:
|
||||
pat = Pattern()
|
||||
pat.polygon((1, 0), [[0, 0], [1, 0], [1, 1], [0, 1]])
|
||||
|
||||
polys = pat.layer_as_polygons((1, 0), flatten=False)
|
||||
assert len(polys) == 1
|
||||
assert isinstance(polys[0], Polygon)
|
||||
assert_allclose(polys[0].vertices, [[0, 0], [1, 0], [1, 1], [0, 1]])
|
||||
|
||||
def test_layer_as_polygons_repetition() -> None:
|
||||
pat = Pattern()
|
||||
rep = Grid(a_vector=(2, 0), a_count=2)
|
||||
pat.polygon((1, 0), [[0, 0], [1, 0], [1, 1], [0, 1]], repetition=rep)
|
||||
|
||||
polys = pat.layer_as_polygons((1, 0), flatten=False)
|
||||
assert len(polys) == 2
|
||||
# First polygon at (0,0)
|
||||
assert_allclose(polys[0].vertices, [[0, 0], [1, 0], [1, 1], [0, 1]])
|
||||
# Second polygon at (2,0)
|
||||
assert_allclose(polys[1].vertices, [[2, 0], [3, 0], [3, 1], [2, 1]])
|
||||
|
||||
def test_layer_as_polygons_flatten() -> None:
|
||||
lib = Library()
|
||||
|
||||
child = Pattern()
|
||||
child.polygon((1, 0), [[0, 0], [1, 0], [1, 1]])
|
||||
lib['child'] = child
|
||||
|
||||
parent = Pattern()
|
||||
parent.ref('child', offset=(10, 10), rotation=numpy.pi/2)
|
||||
|
||||
polys = parent.layer_as_polygons((1, 0), flatten=True, library=lib)
|
||||
assert len(polys) == 1
|
||||
# Original child at (0,0) with rot pi/2 is still at (0,0) in its own space?
|
||||
# No, ref.as_pattern(child) will apply the transform.
|
||||
# Child (0,0), (1,0), (1,1) rotated pi/2 around (0,0) -> (0,0), (0,1), (-1,1)
|
||||
# Then offset by (10,10) -> (10,10), (10,11), (9,11)
|
||||
|
||||
# Let's verify the vertices
|
||||
expected = numpy.array([[10, 10], [10, 11], [9, 11]])
|
||||
assert_allclose(polys[0].vertices, expected, atol=1e-10)
|
||||
|
||||
def test_boolean_import_error() -> None:
|
||||
from masque import boolean
|
||||
# If pyclipper is not installed, this should raise ImportError
|
||||
try:
|
||||
import pyclipper # noqa: F401
|
||||
pytest.skip("pyclipper is installed, cannot test ImportError")
|
||||
except ImportError:
|
||||
with pytest.raises(ImportError, match="Boolean operations require 'pyclipper'"):
|
||||
boolean([], [], operation='union')
|
||||
|
||||
def test_polygon_boolean_shortcut() -> None:
|
||||
poly = Polygon([[0, 0], [1, 0], [1, 1]])
|
||||
# This should also raise ImportError if pyclipper is missing
|
||||
try:
|
||||
import pyclipper # noqa: F401
|
||||
pytest.skip("pyclipper is installed")
|
||||
except ImportError:
|
||||
with pytest.raises(ImportError, match="Boolean operations require 'pyclipper'"):
|
||||
poly.boolean(poly)
|
||||
|
||||
def test_bridge_holes() -> None:
|
||||
from masque.utils.boolean import _bridge_holes
|
||||
|
||||
# Outer: 10x10 square
|
||||
outer = numpy.array([[0, 0], [10, 0], [10, 10], [0, 10]])
|
||||
# Hole: 2x2 square in the middle
|
||||
hole = numpy.array([[4, 4], [6, 4], [6, 6], [4, 6]])
|
||||
|
||||
bridged = _bridge_holes(outer, [hole])
|
||||
|
||||
# We expect more vertices than outer + hole
|
||||
# Original outer has 4, hole has 4. Bridge adds 2 (to hole) and 2 (back to outer) + 1 to close hole loop?
|
||||
# Our implementation:
|
||||
# 1. outer up to bridge edge (best_edge_idx)
|
||||
# 2. bridge point on outer
|
||||
# 3. hole reordered starting at max X
|
||||
# 4. close hole loop (repeat max X)
|
||||
# 5. bridge point on outer again
|
||||
# 6. rest of outer
|
||||
|
||||
# max X of hole is 6 at (6,4) or (6,6). argmax will pick first one.
|
||||
# hole vertices: [4,4], [6,4], [6,6], [4,6]. argmax(x) is index 1: (6,4)
|
||||
# roll hole to start at (6,4): [6,4], [6,6], [4,6], [4,4]
|
||||
|
||||
# intersection of ray from (6,4) to right:
|
||||
# edges of outer: (0,0)-(10,0), (10,0)-(10,10), (10,10)-(0,10), (0,10)-(0,0)
|
||||
# edge (10,0)-(10,10) spans y=4.
|
||||
# intersection at (10,4). best_edge_idx = 1 (edge from index 1 to 2)
|
||||
|
||||
# vertices added:
|
||||
# outer[0:2]: (0,0), (10,0)
|
||||
# bridge pt: (10,4)
|
||||
# hole: (6,4), (6,6), (4,6), (4,4)
|
||||
# hole close: (6,4)
|
||||
# bridge pt back: (10,4)
|
||||
# outer[2:]: (10,10), (0,10)
|
||||
|
||||
expected_len = 11
|
||||
assert len(bridged) == expected_len
|
||||
|
||||
# verify it wraps around the hole and back
|
||||
# index 2 is bridge_pt
|
||||
assert_allclose(bridged[2], [10, 4])
|
||||
# index 3 is hole reordered max X
|
||||
assert_allclose(bridged[3], [6, 4])
|
||||
# index 7 is hole closed at max X
|
||||
assert_allclose(bridged[7], [6, 4])
|
||||
# index 8 is bridge_pt back
|
||||
assert_allclose(bridged[8], [10, 4])
|
||||
|
|
@ -50,7 +50,6 @@ def test_builder_plug() -> None:
|
|||
|
||||
assert "start" in b.ports
|
||||
assert_equal(b.ports["start"].offset, [90, 100])
|
||||
assert b.ports["start"].rotation is not None
|
||||
assert_allclose(b.ports["start"].rotation, 0, atol=1e-10)
|
||||
|
||||
|
||||
|
|
@ -127,5 +126,4 @@ def test_dead_plug_best_effort() -> None:
|
|||
# 3. Translate by s_port.offset (0,0): (-10,-10)
|
||||
assert_allclose(b.ports['B'].offset, [-10, -10], atol=1e-10)
|
||||
# P2 rot pi + transform rot -pi = 0
|
||||
assert b.ports['B'].rotation is not None
|
||||
assert_allclose(b.ports['B'].rotation, 0, atol=1e-10)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
from pathlib import Path
|
||||
from typing import cast
|
||||
import pytest
|
||||
from numpy.testing import assert_allclose
|
||||
|
||||
|
|
@ -78,18 +77,17 @@ def test_gdsii_full_roundtrip(tmp_path: Path) -> None:
|
|||
# Order might be different depending on how they were written,
|
||||
# but here they should match the order they were added if dict order is preserved.
|
||||
# Actually, they are grouped by layer.
|
||||
p_flush = cast("MPath", read_paths.shapes[(2, 0)][0])
|
||||
p_flush = read_paths.shapes[(2, 0)][0]
|
||||
assert p_flush.cap == MPath.Cap.Flush
|
||||
|
||||
p_square = cast("MPath", read_paths.shapes[(2, 1)][0])
|
||||
p_square = read_paths.shapes[(2, 1)][0]
|
||||
assert p_square.cap == MPath.Cap.Square
|
||||
|
||||
p_circle = cast("MPath", read_paths.shapes[(2, 2)][0])
|
||||
p_circle = read_paths.shapes[(2, 2)][0]
|
||||
assert p_circle.cap == MPath.Cap.Circle
|
||||
|
||||
p_custom = cast("MPath", read_paths.shapes[(2, 3)][0])
|
||||
p_custom = read_paths.shapes[(2, 3)][0]
|
||||
assert p_custom.cap == MPath.Cap.SquareCustom
|
||||
assert p_custom.cap_extensions is not None
|
||||
assert_allclose(p_custom.cap_extensions, (1, 5))
|
||||
|
||||
# Check Refs with repetitions
|
||||
|
|
@ -127,8 +125,8 @@ def test_oasis_full_roundtrip(tmp_path: Path) -> None:
|
|||
|
||||
# Check Path caps
|
||||
read_paths = read_lib["paths"]
|
||||
assert cast("MPath", read_paths.shapes[(2, 0)][0]).cap == MPath.Cap.Flush
|
||||
assert cast("MPath", read_paths.shapes[(2, 1)][0]).cap == MPath.Cap.Square
|
||||
assert read_paths.shapes[(2, 0)][0].cap == MPath.Cap.Flush
|
||||
assert read_paths.shapes[(2, 1)][0].cap == MPath.Cap.Square
|
||||
# OASIS HalfWidth is Square. masque's Square is also HalfWidth extension.
|
||||
# Wait, Circle cap in OASIS?
|
||||
# masque/file/oasis.py:
|
||||
|
|
|
|||
|
|
@ -1,12 +1,11 @@
|
|||
from pathlib import Path
|
||||
from typing import cast
|
||||
import numpy
|
||||
from numpy.testing import assert_equal, assert_allclose
|
||||
|
||||
from ..pattern import Pattern
|
||||
from ..library import Library
|
||||
from ..file import gdsii
|
||||
from ..shapes import Path as MPath, Polygon
|
||||
from ..shapes import Path as MPath
|
||||
|
||||
|
||||
def test_gdsii_roundtrip(tmp_path: Path) -> None:
|
||||
|
|
@ -37,14 +36,14 @@ def test_gdsii_roundtrip(tmp_path: Path) -> None:
|
|||
assert "ref_cell" in read_lib
|
||||
|
||||
# Check polygon
|
||||
read_poly = cast("Polygon", read_lib["poly_cell"].shapes[(1, 0)][0])
|
||||
read_poly = read_lib["poly_cell"].shapes[(1, 0)][0]
|
||||
# GDSII closes polygons, so it might have an extra vertex or different order
|
||||
assert len(read_poly.vertices) >= 4
|
||||
# Check bounds as a proxy for geometry correctness
|
||||
assert_equal(read_lib["poly_cell"].get_bounds(), [[0, 0], [10, 10]])
|
||||
|
||||
# Check path
|
||||
read_path = cast("MPath", read_lib["path_cell"].shapes[(2, 5)][0])
|
||||
read_path = read_lib["path_cell"].shapes[(2, 5)][0]
|
||||
assert isinstance(read_path, MPath)
|
||||
assert read_path.width == 10
|
||||
assert_equal(read_path.vertices, [[0, 0], [100, 0]])
|
||||
|
|
@ -67,5 +66,4 @@ def test_gdsii_annotations(tmp_path: Path) -> None:
|
|||
|
||||
read_lib, _ = gdsii.readfile(gds_file)
|
||||
read_ann = read_lib["cell"].shapes[(1, 0)][0].annotations
|
||||
assert read_ann is not None
|
||||
assert read_ann["1"] == ["hello"]
|
||||
|
|
|
|||
|
|
@ -1,12 +1,8 @@
|
|||
import pytest
|
||||
from typing import cast, TYPE_CHECKING
|
||||
from ..library import Library, LazyLibrary
|
||||
from ..pattern import Pattern
|
||||
from ..error import LibraryError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..shapes import Polygon
|
||||
|
||||
|
||||
def test_library_basic() -> None:
|
||||
lib = Library()
|
||||
|
|
@ -55,7 +51,7 @@ def test_library_flatten() -> None:
|
|||
assert not flat_parent.has_refs()
|
||||
assert len(flat_parent.shapes[(1, 0)]) == 1
|
||||
# Transformations are baked into vertices for Polygon
|
||||
assert_vertices = cast("Polygon", flat_parent.shapes[(1, 0)][0]).vertices
|
||||
assert_vertices = flat_parent.shapes[(1, 0)][0].vertices
|
||||
assert tuple(assert_vertices[0]) == (10.0, 10.0)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -28,7 +28,6 @@ def test_pather_straight(pather_setup: tuple[Pather, PathTool, Library]) -> None
|
|||
|
||||
# port rot pi/2 (North). Travel +pi relative to port -> South.
|
||||
assert_allclose(p.ports["start"].offset, [0, -10], atol=1e-10)
|
||||
assert p.ports["start"].rotation is not None
|
||||
assert_allclose(p.ports["start"].rotation, pi / 2, atol=1e-10)
|
||||
|
||||
|
||||
|
|
@ -47,7 +46,6 @@ def test_pather_bend(pather_setup: tuple[Pather, PathTool, Library]) -> None:
|
|||
assert_allclose(p.ports["start"].offset, [-1, -10], atol=1e-10)
|
||||
# North (pi/2) + CW (90 deg) -> West (pi)?
|
||||
# Actual behavior results in 0 (East) - apparently rotation is flipped.
|
||||
assert p.ports["start"].rotation is not None
|
||||
assert_allclose(p.ports["start"].rotation, 0, atol=1e-10)
|
||||
|
||||
|
||||
|
|
@ -82,7 +80,6 @@ def test_pather_at_chaining(pather_setup: tuple[Pather, PathTool, Library]) -> N
|
|||
assert_allclose(p.ports["start"].offset, [1, -20], atol=1e-10)
|
||||
# pi/2 (North) + CCW (90 deg) -> 0 (East)?
|
||||
# Actual behavior results in pi (West).
|
||||
assert p.ports["start"].rotation is not None
|
||||
assert_allclose(p.ports["start"].rotation, pi, atol=1e-10)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
from typing import cast
|
||||
from numpy.testing import assert_equal, assert_allclose
|
||||
from numpy import pi
|
||||
|
||||
|
|
@ -57,7 +56,7 @@ def test_pattern_translate() -> None:
|
|||
pat.translate_elements((10, 20))
|
||||
|
||||
# Polygon.translate adds to vertices, and offset is always (0,0)
|
||||
assert_equal(cast("Polygon", pat.shapes[(1, 0)][0]).vertices[0], [10, 20])
|
||||
assert_equal(pat.shapes[(1, 0)][0].vertices[0], [10, 20])
|
||||
assert_equal(pat.ports["P1"].offset, [15, 25])
|
||||
|
||||
|
||||
|
|
@ -68,7 +67,7 @@ def test_pattern_scale() -> None:
|
|||
pat.scale_by(2)
|
||||
|
||||
# Vertices should be scaled
|
||||
assert_equal(cast("Polygon", pat.shapes[(1, 0)][0]).vertices, [[0, 0], [0, 2], [2, 2], [2, 0]])
|
||||
assert_equal(pat.shapes[(1, 0)][0].vertices, [[0, 0], [0, 2], [2, 2], [2, 0]])
|
||||
|
||||
|
||||
def test_pattern_rotate() -> None:
|
||||
|
|
@ -78,7 +77,7 @@ def test_pattern_rotate() -> None:
|
|||
pat.rotate_around((0, 0), pi / 2)
|
||||
|
||||
# [10, 0] rotated 90 deg around (0,0) is [0, 10]
|
||||
assert_allclose(cast("Polygon", pat.shapes[(1, 0)][0]).vertices[0], [0, 10], atol=1e-10)
|
||||
assert_allclose(pat.shapes[(1, 0)][0].vertices[0], [0, 10], atol=1e-10)
|
||||
|
||||
|
||||
def test_pattern_mirror() -> None:
|
||||
|
|
@ -87,7 +86,7 @@ def test_pattern_mirror() -> None:
|
|||
# Mirror across X axis (y -> -y)
|
||||
pat.mirror(0)
|
||||
|
||||
assert_equal(cast("Polygon", pat.shapes[(1, 0)][0]).vertices[0], [10, -5])
|
||||
assert_equal(pat.shapes[(1, 0)][0].vertices[0], [10, -5])
|
||||
|
||||
|
||||
def test_pattern_get_bounds() -> None:
|
||||
|
|
@ -107,9 +106,7 @@ def test_pattern_interface() -> None:
|
|||
|
||||
assert "in_A" in iface.ports
|
||||
assert "out_A" in iface.ports
|
||||
assert iface.ports["in_A"].rotation is not None
|
||||
assert_allclose(iface.ports["in_A"].rotation, pi, atol=1e-10)
|
||||
assert iface.ports["out_A"].rotation is not None
|
||||
assert_allclose(iface.ports["out_A"].rotation, 0, atol=1e-10)
|
||||
assert iface.ports["in_A"].ptype == "test"
|
||||
assert iface.ports["out_A"].ptype == "test"
|
||||
|
|
|
|||
|
|
@ -17,13 +17,11 @@ def test_port_transform() -> None:
|
|||
p = Port(offset=(10, 0), rotation=0)
|
||||
p.rotate_around((0, 0), pi / 2)
|
||||
assert_allclose(p.offset, [0, 10], atol=1e-10)
|
||||
assert p.rotation is not None
|
||||
assert_allclose(p.rotation, pi / 2, atol=1e-10)
|
||||
|
||||
p.mirror(0) # Mirror across x axis (axis 0): in-place relative to offset
|
||||
assert_allclose(p.offset, [0, 10], atol=1e-10)
|
||||
# rotation was pi/2 (90 deg), mirror across x (0 deg) -> -pi/2 == 3pi/2
|
||||
assert p.rotation is not None
|
||||
assert_allclose(p.rotation, 3 * pi / 2, atol=1e-10)
|
||||
|
||||
|
||||
|
|
@ -32,7 +30,6 @@ def test_port_flip_across() -> None:
|
|||
p.flip_across(axis=1) # Mirror across x=0: flips x-offset
|
||||
assert_equal(p.offset, [-10, 0])
|
||||
# rotation was 0, mirrored(1) -> pi
|
||||
assert p.rotation is not None
|
||||
assert_allclose(p.rotation, pi, atol=1e-10)
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -25,7 +25,6 @@ def test_ports2data_roundtrip() -> None:
|
|||
|
||||
assert "P1" in pat2.ports
|
||||
assert_allclose(pat2.ports["P1"].offset, [10, 20], atol=1e-10)
|
||||
assert pat2.ports["P1"].rotation is not None
|
||||
assert_allclose(pat2.ports["P1"].rotation, numpy.pi / 2, atol=1e-10)
|
||||
assert pat2.ports["P1"].ptype == "test"
|
||||
|
||||
|
|
@ -53,5 +52,4 @@ def test_data_to_ports_hierarchical() -> None:
|
|||
# rot 0 + pi/2 = pi/2
|
||||
assert "A" in parent.ports
|
||||
assert_allclose(parent.ports["A"].offset, [100, 105], atol=1e-10)
|
||||
assert parent.ports["A"].rotation is not None
|
||||
assert_allclose(parent.ports["A"].rotation, numpy.pi / 2, atol=1e-10)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
from typing import cast, TYPE_CHECKING
|
||||
from numpy.testing import assert_equal, assert_allclose
|
||||
from numpy import pi
|
||||
|
||||
|
|
@ -6,9 +5,6 @@ from ..pattern import Pattern
|
|||
from ..ref import Ref
|
||||
from ..repetition import Grid
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..shapes import Polygon
|
||||
|
||||
|
||||
def test_ref_init() -> None:
|
||||
ref = Ref(offset=(10, 20), rotation=pi / 4, mirrored=True, scale=2.0)
|
||||
|
|
@ -26,7 +22,7 @@ def test_ref_as_pattern() -> None:
|
|||
transformed_pat = ref.as_pattern(sub_pat)
|
||||
|
||||
# Check transformed shape
|
||||
shape = cast("Polygon", transformed_pat.shapes[(1, 0)][0])
|
||||
shape = transformed_pat.shapes[(1, 0)][0]
|
||||
# ref.as_pattern deepcopies sub_pat then applies transformations:
|
||||
# 1. pattern.scale_by(2) -> vertices [[0,0], [2,0], [0,2]]
|
||||
# 2. pattern.rotate_around((0,0), pi/2) -> vertices [[0,0], [0,2], [-2,0]]
|
||||
|
|
@ -46,7 +42,7 @@ def test_ref_with_repetition() -> None:
|
|||
# Should have 4 shapes
|
||||
assert len(repeated_pat.shapes[(1, 0)]) == 4
|
||||
|
||||
first_verts = sorted([tuple(cast("Polygon", s).vertices[0]) for s in repeated_pat.shapes[(1, 0)]])
|
||||
first_verts = sorted([tuple(s.vertices[0]) for s in repeated_pat.shapes[(1, 0)]])
|
||||
assert first_verts == [(0.0, 0.0), (0.0, 10.0), (10.0, 0.0), (10.0, 10.0)]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
import pytest
|
||||
from typing import cast, TYPE_CHECKING
|
||||
from numpy.testing import assert_allclose
|
||||
from numpy import pi
|
||||
|
||||
|
|
@ -8,9 +7,6 @@ from ..builder.tools import PathTool
|
|||
from ..library import Library
|
||||
from ..ports import Port
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..shapes import Path
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def rpather_setup() -> tuple[RenderPather, PathTool, Library]:
|
||||
|
|
@ -41,7 +37,7 @@ def test_renderpather_basic(rpather_setup: tuple[RenderPather, PathTool, Library
|
|||
# start_port rot pi/2. pi/2 + pi = 3pi/2.
|
||||
# (10, 0) rotated 3pi/2 -> (0, -10)
|
||||
# So vertices: (0,0), (0,-10), (0,-20)
|
||||
path_shape = cast("Path", rp.pattern.shapes[(1, 0)][0])
|
||||
path_shape = rp.pattern.shapes[(1, 0)][0]
|
||||
assert len(path_shape.vertices) == 3
|
||||
assert_allclose(path_shape.vertices, [[0, 0], [0, -10], [0, -20]], atol=1e-10)
|
||||
|
||||
|
|
@ -52,7 +48,7 @@ def test_renderpather_bend(rpather_setup: tuple[RenderPather, PathTool, Library]
|
|||
rp.at("start").path(ccw=None, length=10).path(ccw=False, length=10)
|
||||
|
||||
rp.render()
|
||||
path_shape = cast("Path", rp.pattern.shapes[(1, 0)][0])
|
||||
path_shape = rp.pattern.shapes[(1, 0)][0]
|
||||
# Path vertices:
|
||||
# 1. Start (0,0)
|
||||
# 2. Straight end: (0, -10)
|
||||
|
|
|
|||
|
|
@ -25,9 +25,8 @@ class Mirrorable(metaclass=ABCMeta):
|
|||
to (0, 0), this is equivalent to mirroring in the container's coordinate system.
|
||||
|
||||
Args:
|
||||
axis: Axis to mirror across:
|
||||
0: X-axis (flip y coords),
|
||||
1: Y-axis (flip x coords)
|
||||
axis: Axis to mirror across (0: x-axis, 1: y-axis).
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
|
|
@ -38,8 +37,8 @@ class Mirrorable(metaclass=ABCMeta):
|
|||
Optionally mirror the entity across both axes through its origin.
|
||||
|
||||
Args:
|
||||
across_x: Mirror across the horizontal X-axis (flip Y coordinates).
|
||||
across_y: Mirror across the vertical Y-axis (flip X coordinates).
|
||||
across_x: Mirror across x axis (flip y)
|
||||
across_y: Mirror across y axis (flip x)
|
||||
|
||||
Returns:
|
||||
self
|
||||
|
|
@ -82,7 +81,7 @@ class Flippable(Positionable, metaclass=ABCMeta):
|
|||
into account.
|
||||
|
||||
Args:
|
||||
axis: Axis to mirror across. 0: x-axis (flip y coord), 1: y-axis (flip x coord).
|
||||
axis: Axis to mirror across. 0 mirrors across y=0. 1 mirrors across x=0.
|
||||
x: Vertical line x=val to mirror across.
|
||||
y: Horizontal line y=val to mirror across.
|
||||
|
||||
|
|
|
|||
|
|
@ -1,180 +0,0 @@
|
|||
from typing import Any, Literal
|
||||
from collections.abc import Iterable
|
||||
import logging
|
||||
|
||||
import numpy
|
||||
from numpy.typing import NDArray
|
||||
|
||||
from ..shapes.polygon import Polygon
|
||||
from ..error import PatternError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def _bridge_holes(outer_path: NDArray[numpy.float64], holes: list[NDArray[numpy.float64]]) -> NDArray[numpy.float64]:
|
||||
"""
|
||||
Bridge multiple holes into an outer boundary using zero-width slits.
|
||||
"""
|
||||
current_outer = outer_path
|
||||
|
||||
# Sort holes by max X to potentially minimize bridge lengths or complexity
|
||||
# (though not strictly necessary for correctness)
|
||||
holes = sorted(holes, key=lambda h: numpy.max(h[:, 0]), reverse=True)
|
||||
|
||||
for hole in holes:
|
||||
# Find max X vertex of hole
|
||||
max_idx = numpy.argmax(hole[:, 0])
|
||||
m = hole[max_idx]
|
||||
|
||||
# Find intersection of ray (m.x, m.y) + (t, 0) with current_outer edges
|
||||
best_t = numpy.inf
|
||||
best_pt = None
|
||||
best_edge_idx = -1
|
||||
|
||||
n = len(current_outer)
|
||||
for i in range(n):
|
||||
p1 = current_outer[i]
|
||||
p2 = current_outer[(i + 1) % n]
|
||||
|
||||
# Check if edge (p1, p2) spans m.y
|
||||
if (p1[1] <= m[1] < p2[1]) or (p2[1] <= m[1] < p1[1]):
|
||||
# Intersection x:
|
||||
# x = p1.x + (m.y - p1.y) * (p2.x - p1.x) / (p2.y - p1.y)
|
||||
t = (p1[0] + (m[1] - p1[1]) * (p2[0] - p1[0]) / (p2[1] - p1[1])) - m[0]
|
||||
if 0 <= t < best_t:
|
||||
best_t = t
|
||||
best_pt = numpy.array([m[0] + t, m[1]])
|
||||
best_edge_idx = i
|
||||
|
||||
if best_edge_idx == -1:
|
||||
# Fallback: find nearest vertex if ray fails (shouldn't happen for valid hole)
|
||||
dists = numpy.linalg.norm(current_outer - m, axis=1)
|
||||
best_edge_idx = int(numpy.argmin(dists))
|
||||
best_pt = current_outer[best_edge_idx]
|
||||
# Adjust best_edge_idx to insert AFTER this vertex
|
||||
# (treating it as a degenerate edge)
|
||||
|
||||
assert best_pt is not None
|
||||
|
||||
# Reorder hole vertices to start at m
|
||||
hole_reordered = numpy.roll(hole, -max_idx, axis=0)
|
||||
|
||||
# Construct new outer:
|
||||
# 1. Start of outer up to best_edge_idx
|
||||
# 2. Intersection point
|
||||
# 3. Hole vertices (starting and ending at m)
|
||||
# 4. Intersection point (to close slit)
|
||||
# 5. Rest of outer
|
||||
|
||||
new_outer: list[NDArray[numpy.float64]] = []
|
||||
new_outer.extend(current_outer[:best_edge_idx + 1])
|
||||
new_outer.append(best_pt)
|
||||
new_outer.extend(hole_reordered)
|
||||
new_outer.append(hole_reordered[0]) # close hole loop at m
|
||||
new_outer.append(best_pt) # back to outer
|
||||
new_outer.extend(current_outer[best_edge_idx + 1:])
|
||||
|
||||
current_outer = numpy.array(new_outer)
|
||||
|
||||
return current_outer
|
||||
|
||||
def boolean(
        subjects: Iterable[Any],
        clips: Iterable[Any] | None = None,
        operation: Literal['union', 'intersection', 'difference', 'xor'] = 'union',
        scale: float = 1e6,
        ) -> list[Polygon]:
    """
    Perform a 2D boolean operation on two sets of polygons.

    Args:
        subjects: Iterable of subjects (`Polygon`s, vertex arrays, objects
            providing `to_polygons()`, or iterables of such shapes).
        clips: Iterable of clips (same accepted types as `subjects`), or `None`.
        operation: The boolean operation to perform
            ('union', 'intersection', 'difference', or 'xor').
        scale: Scaling factor used when converting coordinates to integers
            (pyclipper operates on integer coordinates).

    Returns:
        A list of result `Polygon`s. Holes are attached to their enclosing
        outer boundary via zero-width slits (see `_bridge_holes`).

    Raises:
        ImportError: If `pyclipper` is not installed.
        PatternError: If an input object is of an unsupported type.
    """
    try:
        import pyclipper
    except ImportError:
        raise ImportError(
            "Boolean operations require 'pyclipper'. "
            "Install it with 'pip install pyclipper' or 'pip install masque[boolean]'."
            ) from None

    op_map = {
        'union': pyclipper.PT_UNION,
        'intersection': pyclipper.PT_INTERSECTION,
        'difference': pyclipper.PT_DIFFERENCE,
        'xor': pyclipper.PT_XOR,
        }

    def to_vertices(objs: Iterable[Any] | None) -> list[NDArray]:
        # Flatten the supported input types down to bare vertex arrays.
        if objs is None:
            return []
        verts = []
        for obj in objs:
            if hasattr(obj, 'to_polygons'):
                for p in obj.to_polygons():
                    verts.append(p.vertices)
            elif isinstance(obj, numpy.ndarray):
                verts.append(obj)
            elif isinstance(obj, Polygon):
                verts.append(obj.vertices)
            else:
                # Try to iterate if it's an iterable of shapes
                try:
                    for sub in obj:
                        if hasattr(sub, 'to_polygons'):
                            for p in sub.to_polygons():
                                verts.append(p.vertices)
                        elif isinstance(sub, Polygon):
                            verts.append(sub.vertices)
                except TypeError:
                    raise PatternError(f"Unsupported type for boolean operation: {type(obj)}") from None
        return verts

    subject_verts = to_vertices(subjects)
    clip_verts = to_vertices(clips)

    # pyclipper's AddPaths raises when handed an empty path list; with no
    # geometry at all, the result of any operation is empty, so bail out early.
    if not subject_verts and not clip_verts:
        return []

    pc = pyclipper.Pyclipper()
    if subject_verts:
        pc.AddPaths(pyclipper.scale_to_clipper(subject_verts, scale), pyclipper.PT_SUBJECT, True)
    if clip_verts:
        pc.AddPaths(pyclipper.scale_to_clipper(clip_verts, scale), pyclipper.PT_CLIP, True)

    # Use Execute2 (PolyTree output) to distinguish between outers and holes
    polytree = pc.Execute2(op_map[operation.lower()], pyclipper.PFT_NONZERO, pyclipper.PFT_NONZERO)

    result_polygons: list[Polygon] = []

    def process_node(node: Any) -> None:
        # Holes are handled while processing their parent outer boundary.
        if node.IsHole:
            return

        # This node is an outer boundary
        outer_path = numpy.array(pyclipper.scale_from_clipper(node.Contour, scale))

        # Immediate hole children belong to this outer boundary
        holes = []
        for child in node.Childs:
            if child.IsHole:
                holes.append(numpy.array(pyclipper.scale_from_clipper(child.Contour, scale)))

        if holes:
            combined_vertices = _bridge_holes(outer_path, holes)
            result_polygons.append(Polygon(combined_vertices))
        else:
            result_polygons.append(Polygon(outer_path))

        # Children of holes are nested outer boundaries; recurse into them
        for child in node.Childs:
            if child.IsHole:
                for grandchild in child.Childs:
                    process_node(grandchild)

    for top_node in polytree.Childs:
        process_node(top_node)

    return result_polygons
|
||||
|
|
@ -56,8 +56,6 @@ dev = [
|
|||
"masque[text]",
|
||||
"masque[manhattanize]",
|
||||
"masque[manhattanize_slow]",
|
||||
"ruff>=0.15.1",
|
||||
"mypy>=1.19.1",
|
||||
]
|
||||
|
||||
[tool.hatch.version]
|
||||
|
|
@ -71,7 +69,6 @@ visualize = ["matplotlib"]
|
|||
text = ["matplotlib", "freetype-py"]
|
||||
manhattanize = ["scikit-image"]
|
||||
manhattanize_slow = ["float_raster"]
|
||||
boolean = ["pyclipper"]
|
||||
|
||||
|
||||
[tool.ruff]
|
||||
|
|
@ -109,9 +106,3 @@ lint.ignore = [
|
|||
addopts = "-rsXx"
|
||||
testpaths = ["masque"]
|
||||
|
||||
[tool.mypy]
|
||||
mypy_path = "stubs"
|
||||
python_version = "3.11"
|
||||
strict = false
|
||||
check_untyped_defs = true
|
||||
|
||||
|
|
|
|||
|
|
@ -1,12 +0,0 @@
|
|||
# Minimal type stub for ezdxf's top-level document interface: only the
# attributes and functions that masque's DXF reader/writer actually uses.
from typing import Any, TextIO, Iterable
from .layouts import Modelspace, BlockRecords


class Drawing:
    # Collection of block-definition layouts in the drawing
    blocks: BlockRecords

    @property
    def layers(self) -> Iterable[Any]: ...
    def modelspace(self) -> Modelspace: ...
    # Serialize the drawing to a text stream
    def write(self, stream: TextIO) -> None: ...


# Create a new empty drawing / parse one from a text stream
def new(version: str = ..., setup: bool = ...) -> Drawing: ...
def read(stream: TextIO) -> Drawing: ...
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
# Minimal type stubs for the ezdxf entity classes used by masque's DXF support.
from typing import Any, Iterable, Tuple, Sequence


class DXFEntity:
    # Raw DXF attributes as a name -> value mapping
    def dxfattribs(self) -> dict[str, Any]: ...
    # DXF type tag, e.g. 'LWPOLYLINE' or 'TEXT'
    def dxftype(self) -> str: ...


class LWPolyline(DXFEntity):
    def get_points(self) -> Iterable[Tuple[float, ...]]: ...


class Polyline(DXFEntity):
    def points(self) -> Iterable[Any]: ...  # has .xyz


class Text(DXFEntity):
    # Returns (alignment, (x, y, z)) insertion placement
    def get_placement(self) -> Tuple[int, Tuple[float, float, float]]: ...
    def set_placement(self, p: Sequence[float], align: int = ...) -> Text: ...


class Insert(DXFEntity): ...
|
|
@ -1,4 +0,0 @@
|
|||
# Minimal stub for ezdxf's text alignment enum; only the member masque uses.
from enum import IntEnum


class TextEntityAlignment(IntEnum):
    BOTTOM_LEFT = ...
|
|
@ -1,20 +0,0 @@
|
|||
# Minimal type stubs for ezdxf layout containers used by masque's DXF support.
from typing import Any, Iterator, Sequence, Union, Iterable
from .entities import DXFEntity


class BaseLayout:
    # Iterating a layout yields its entities
    def __iter__(self) -> Iterator[DXFEntity]: ...
    def add_lwpolyline(self, points: Iterable[Sequence[float]], dxfattribs: dict[str, Any] = ...) -> Any: ...
    def add_text(self, text: str, dxfattribs: dict[str, Any] = ...) -> Any: ...
    # Insert a reference to a named block at the given position
    def add_blockref(self, name: str, insert: Any, dxfattribs: dict[str, Any] = ...) -> Any: ...


class Modelspace(BaseLayout):
    @property
    def name(self) -> str: ...


class BlockLayout(BaseLayout):
    @property
    def name(self) -> str: ...


class BlockRecords:
    # Create a new (empty) named block layout
    def new(self, name: str) -> BlockLayout: ...
    def __iter__(self) -> Iterator[BlockLayout]: ...
|
|
@ -1,46 +0,0 @@
|
|||
# Minimal type stubs for the pyclipper package (Clipper polygon-clipping
# bindings); only the names used by masque's boolean operations.
from typing import Any
from collections.abc import Iterable, Sequence
import numpy
from numpy.typing import NDArray


# Basic types for Clipper integer coordinates
Path = Sequence[tuple[int, int]]
Paths = Sequence[Path]

# Types for input/output floating point coordinates
FloatPoint = tuple[float, float] | NDArray[numpy.floating]
FloatPath = Sequence[FloatPoint] | NDArray[numpy.floating]
FloatPaths = Iterable[FloatPath]

# Constants
# Polygon roles passed to AddPath(s)
PT_SUBJECT: int
PT_CLIP: int

# Boolean operation selectors passed to Execute / Execute2
PT_UNION: int
PT_INTERSECTION: int
PT_DIFFERENCE: int
PT_XOR: int

# Polygon fill rules
PFT_EVENODD: int
PFT_NONZERO: int
PFT_POSITIVE: int
PFT_NEGATIVE: int

# Scaling functions (Clipper works on integers; these convert to/from floats)
def scale_to_clipper(paths: FloatPaths, scale: float = ...) -> Paths: ...
def scale_from_clipper(paths: Path | Paths, scale: float = ...) -> Any: ...


class PolyNode:
    # One node of the result tree returned by Execute2
    Contour: Path
    Childs: list[PolyNode]
    Parent: PolyNode
    IsHole: bool


class Pyclipper:
    def __init__(self) -> None: ...
    def AddPath(self, path: Path, poly_type: int, closed: bool) -> None: ...
    def AddPaths(self, paths: Paths, poly_type: int, closed: bool) -> None: ...
    # Flat list-of-paths result
    def Execute(self, clip_type: int, subj_fill_type: int = ..., clip_fill_type: int = ...) -> Paths: ...
    # PolyTree result (root PolyNode), preserving outer/hole nesting
    def Execute2(self, clip_type: int, subj_fill_type: int = ..., clip_fill_type: int = ...) -> PolyNode: ...
    def Clear(self) -> None: ...
|
||||
Loading…
Add table
Add a link
Reference in a new issue