Compare commits

...

142 Commits

Author SHA1 Message Date
jan
dc4f24a028 delete FlatBuilder (Builder subsumes it) 2023-04-11 20:05:50 -07:00
jan
e5de33e1f1 pather fixes / type updates 2023-04-11 19:57:09 -07:00
jan
f22e737e60 add RenderPather 2023-04-11 11:47:57 -07:00
jan
6ec4823244 comment 2023-04-11 11:44:53 -07:00
jan
fa7b94a4c0 split out find_ptransform (static version, only need ports) 2023-04-11 11:44:47 -07:00
jan
9b88be0e92 add todo about underscore 2023-04-08 00:40:52 -07:00
jan
4aad8ab786 shorten labels 2023-04-07 23:50:31 -07:00
jan
de04d06b7a cleanup 2023-04-07 23:49:20 -07:00
jan
8b3f76c2e3 split pather into its own file 2023-04-07 23:20:09 -07:00
jan
66f3ad04b7 comment updates 2023-04-07 23:19:55 -07:00
jan
ed77e389af only mutable variant should have rename_top 2023-04-07 22:29:47 -07:00
jan
372deaca09 fixes 2023-04-07 22:00:23 -07:00
jan
8b92d1ee96 add functions for dealing with the topcell and its name 2023-04-07 21:53:48 -07:00
jan
e7a1d1824a add mktree 2023-04-07 18:13:21 -07:00
jan
9c9d3c3928 redo library class naming 2023-04-07 18:08:42 -07:00
jan
c7505a12b0 should be union; we want to exclude dangling refs 2023-04-07 16:55:50 -07:00
jan
abef8771db fixes to subtree and lshift, as well as some cast() improvements 2023-04-07 16:48:40 -07:00
jan
f1baf8b577 oneshot available at toplevel 2023-04-07 16:33:59 -07:00
jan
355af43fe4 add @oneshot decorator 2023-04-07 16:33:23 -07:00
jan
e8b5c7dec8 lshift operator shouldn't special-case trees
Instead, just call .tops() if there are multiple cells, and fail if
there are multiple tops
2023-04-07 15:29:14 -07:00
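A rough sketch of the `<<` behavior described above, assuming a mapping-style library: `tops()` is the method introduced by the `find_toplevel -> tops` commit below, and `add()` appears in the tutorial code later on this page, but the rest is illustrative rather than masque's actual implementation.

```python
def __lshift__(self, other):
    # Merge `other` into this library and return a single "entry point" cell name.
    if len(other) == 1:
        (top,) = other.keys()
    else:
        tops = other.tops()          # top-level (un-referenced) cells
        if len(tops) > 1:
            raise Exception('Multiple top-level cells; cannot pick one to return')
        (top,) = tops
    self.add(other)                  # assumed merge method
    return top
```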
jan
438b81e62e find_toplevel -> tops 2023-04-07 15:20:51 -07:00
jan
41409cf4f7 create no longer exists. Make mk() give similar ordering as mkpat() 2023-04-06 17:09:46 -07:00
jan
0a14325af8 fix return value 2023-04-06 17:06:41 -07:00
jan
5b1abf5f72 top is always a string 2023-04-06 17:06:13 -07:00
jan
463c41b62a cleanup 2023-04-06 17:03:31 -07:00
jan
62b82eb230 get rid of NamedPattern in favor of just returning a tuple 2023-04-06 16:52:01 -07:00
3e48cc7190 Drop ports when repeating 2023-03-31 13:35:18 -07:00
2364288ba7 port translation is already handled in Pattern 2023-03-31 13:34:49 -07:00
ddcd38674f drop ability to use python-gdsii 2023-03-19 10:18:01 -07:00
742058885f fix rounding 2023-03-19 10:17:37 -07:00
0917b02a31 str(namedpattern) should just return its name 2023-03-19 10:17:30 -07:00
c515ada2f8 updates to Pattern.polygonize() 2023-03-19 10:17:09 -07:00
68ac593270 update to newer ezdxf 2023-03-19 10:16:54 -07:00
e87b13c4eb Need to check against self, since we may add new conflicts as we go 2023-02-24 09:34:26 -08:00
f5d1fd2c29 Pipe-operator does not support forward references 2023-02-23 16:23:06 -08:00
28424be3db add polygon() and label() convenience methods 2023-02-23 13:42:26 -08:00
a710494dd8 use Self type 2023-02-23 13:37:34 -08:00
c9402500e2 modernize type annotations 2023-02-23 13:15:32 -08:00
dfd745a76b fix error message 2023-02-23 11:26:07 -08:00
23c64b4f63 remove per-shape polygonization state 2023-02-23 11:25:40 -08:00
7a4a96ff5f fixes based on mypy 2023-02-09 16:43:06 -08:00
3191866ce0 add prune_empty and delete() 2023-02-09 16:38:42 -08:00
8c42145e44 fixes/updates 2023-02-09 16:38:33 -08:00
1d720b6577 Drop ports by default 2023-02-08 09:26:44 -08:00
38a7ba6434 force 'wb' mode for gzipfile 2023-02-08 09:26:24 -08:00
2e8d06ad6e data_to_ports max_depth default to 0
Makes it more compatible with LazyLibrary -- with recursive approach, we
have to load all the subcells to run ports2data, but those subcells may
or may not exist (e.g. partial library, or maybe we've removed some
duplicates-to-be prior to merging with a different lib)
2023-02-08 08:51:30 -08:00
ea1a882c4e pass along library for bounds 2023-02-08 08:46:38 -08:00
ed8f2c1864 fix precache 2023-02-08 08:44:42 -08:00
492565c1a6 redo library merging 2023-02-08 08:44:36 -08:00
c6b8027b4d pass along tools 2023-02-08 08:44:17 -08:00
e8348dfa75 Make default quiet for underscores 2023-02-07 14:34:47 -08:00
81b381e031 always apply postprocess 2023-02-07 14:25:56 -08:00
cca6b90830 misc fixes 2023-02-07 14:24:34 -08:00
d079b44883 Revert "allow ports2data to take a tree"
This reverts commit 44f823c736.
LazyLibrary can't take Trees anymore, so no need for it.
2023-02-07 09:19:01 -08:00
1d649389a0 LazyLibrary should not contain Trees
altering itself during iteration is not a good idea
2023-02-06 19:01:42 -08:00
dc2c12c26f missing import 2023-02-06 19:00:56 -08:00
a1c4cdee1e fix type for __contains__ 2023-02-06 18:58:53 -08:00
44f823c736 allow ports2data to take a tree 2023-02-06 12:39:43 -08:00
9d466882a0 misc fixes 2023-02-06 12:11:53 -08:00
369bad9ae4 Only allow 1-sized Libraries 2023-02-04 09:28:53 -08:00
ee6d857cad Allow lshift to operate on any library. If only one name, return it, else None 2023-02-04 09:08:05 -08:00
5e2018a1a1 add missing functions to tree 2023-02-04 09:06:51 -08:00
5446a8c40b add Pather.mk() 2023-02-04 09:06:31 -08:00
a9188c5655 add name arg 2023-02-04 09:06:22 -08:00
05f327387e pather reorganization/cleanup 2023-02-04 09:05:34 -08:00
482ca058bb add lshift operator to MutableLibrary 2023-01-31 22:50:10 -08:00
c69081331c set default for library to None 2023-01-31 22:33:45 -08:00
4a2c4c5220 Turn Builder into a subset of Pather 2023-01-31 22:28:02 -08:00
83c710a85f fix add_tree operator 2023-01-31 15:33:42 -08:00
81171e9b02 Allow LazyLibrary to store Trees as well? 2023-01-31 15:31:54 -08:00
02da37a890 Use lshift for tree combination 2023-01-31 15:31:22 -08:00
7da6f5126b stringy type 2023-01-31 12:07:50 -08:00
b9848d149c ergonomics 2023-01-31 12:05:44 -08:00
454f167340 Add Tree as a possible way to allow construction of whole subtrees at once 2023-01-30 18:38:56 -08:00
7191e5f62c Add move_references() and auto-move references during add()-with-rename
Also remove enable_cache, since we now rely on the cache.
2023-01-30 14:38:23 -08:00
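A minimal sketch of the reference-moving idea named above: when a pattern is renamed while being added, every reference that pointed at the old name must be retargeted. `move_references` is the name from the commit message; the `lib` mapping and the `ref.target` attribute are assumptions for illustration, not masque's actual API.

```python
def move_references(lib, old_name: str, new_name: str) -> None:
    # Retarget every Ref that points at `old_name` so it points at `new_name`.
    for pat in lib.values():
        for ref in pat.refs:
            if ref.target == old_name:   # `target` is a hypothetical attribute name
                ref.target = new_name
```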
3be4da3e7c implement auto-renaming during merge, and change _merge() to support it 2023-01-30 13:30:59 -08:00
jan
6f6143da1a remove some trailing underscores 2023-01-29 16:05:22 -08:00
3105a669b4 add NamedPattern 2023-01-27 10:07:39 -08:00
171d61ccab add .create() 2023-01-27 09:24:17 -08:00
95485ab4cd notes on organization 2023-01-26 23:51:13 -08:00
0be7f9d42a note in comments 2023-01-26 23:47:27 -08:00
7fc0902fe7 Add recurse arg to get_bounds 2023-01-26 23:47:16 -08:00
3758df6938 remove log messages 2023-01-26 19:51:15 -08:00
3205091286 Return WrapLibrary from read() and readfile() 2023-01-26 19:28:10 -08:00
9351f5b5f8 Default to adding ports at the origin 2023-01-26 19:16:34 -08:00
658eca5eea some cleanup 2023-01-26 16:49:42 -08:00
783953bb73 add FlatBuilder 2023-01-26 16:49:35 -08:00
afab6fd940 import ports2data at top level 2023-01-26 15:37:36 -08:00
7adaea32ec add library .rename(...) 2023-01-26 14:26:00 -08:00
d6b897131b missing comma 2023-01-26 13:54:04 -08:00
6172abf77c writefile should write to a temporary file first 2023-01-26 13:54:00 -08:00
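The point of the temporary file is atomicity: a crash partway through generation can no longer leave a truncated layout in place of the old one. A generic sketch of the pattern (not masque's actual `writefile` internals), assuming the temporary file lives in the destination directory so the final rename stays on one filesystem:

```python
import os
import tempfile
from pathlib import Path

def write_atomically(path: str, data: bytes) -> None:
    # Write to a temporary file next to the destination...
    dest = Path(path)
    fd, tmp_name = tempfile.mkstemp(dir=dest.parent, suffix=dest.suffix)
    try:
        with os.fdopen(fd, 'wb') as stream:
            stream.write(data)
        os.replace(tmp_name, dest)    # ...then atomically move it into place
    except BaseException:
        os.unlink(tmp_name)
        raise
```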
6cbdd7930d add name_and_set 2023-01-26 11:43:55 -08:00
a061c5a2d9 add missing comments 2023-01-26 11:43:49 -08:00
f80c21ed4d Allow library __setitem__ to take in either Pattern or Callable
No longer need it to be Generic!
2023-01-26 11:36:27 -08:00
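In practice (as seen in the tutorial diffs further down this page) this means a library entry can be assigned either a finished `Pattern` or a zero-argument callable that builds one on demand. A sketch only; which library classes accept which form is only partly shown on this page:

```python
from masque import Pattern, Library, LazyLibrary

lib = Library()
lib['hole'] = Pattern()               # a finished Pattern, stored directly

lazy = LazyLibrary()
lazy['triangle'] = lambda: Pattern()  # a Callable, evaluated when the entry is first needed
```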
089e5192e3 various fixes and cleanup
mainly involving ports_to_data and data_to_ports
2023-01-25 23:57:02 -08:00
6599dad48f move builder.port_utils into utils.ports2data
and rename functions
2023-01-25 23:26:06 -08:00
c2ce9ed547 more fixes and improvements 2023-01-25 23:19:25 -08:00
6eb4af3203 get things working with a LazyLibrary hack while we think about cycles 2023-01-24 23:52:32 -08:00
22735125d5 Lots of progress on tutorials 2023-01-24 23:25:10 -08:00
34a5369a55 Add note about reproducibility for DXF 2023-01-24 14:13:46 -08:00
7cec1e84c9 remove dead code 2023-01-24 13:58:49 -08:00
592b91044b formatting 2023-01-24 13:43:49 -08:00
060f6978cd Fix extra vertex added during OASIS loading 2023-01-24 13:43:22 -08:00
1b04fb7ed0 lots of fixes to get test_rep running 2023-01-24 12:45:44 -08:00
88b64bf525 improve gzipped file reproducibility
Mostly avoid writing the old filename and modification time to the gzip
header
2023-01-24 12:45:21 -08:00
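The gzip header normally embeds the original filename and a modification timestamp, so byte-identical layouts written at different times still produce different `.gds.gz` files. A generic sketch of the standard-library technique the commit describes (not masque's exact code):

```python
import gzip

def write_gz_reproducibly(path: str, data: bytes) -> None:
    # filename='' keeps the original-filename field out of the gzip header, and
    # mtime=0 pins the embedded timestamp, so identical inputs give
    # byte-identical .gz outputs across runs.
    with open(path, 'wb') as raw:
        with gzip.GzipFile(filename='', fileobj=raw, mode='wb', mtime=0) as gz:
            gz.write(data)
```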
bd14ef37c7 clarify comment 2023-01-23 23:01:14 -08:00
95c41d4519 get rid of Mapping stuff on PortsList 2023-01-23 22:58:55 -08:00
c0b9b7fe81 add todos 2023-01-23 22:49:21 -08:00
189f517dcf add AbstractView 2023-01-23 22:48:31 -08:00
9f041e51f4 Move Abstract into its own file 2023-01-23 22:42:37 -08:00
09d76203e8 handle library=None 2023-01-23 22:35:15 -08:00
2302d29433 library can generate abstracts 2023-01-23 22:34:58 -08:00
09b7ecd80e B becomes BB for searchability 2023-01-23 22:34:44 -08:00
aff0df33cc PortsRef -> Abstract 2023-01-23 22:34:31 -08:00
326c9b9727 flake8-aided fixes 2023-01-23 22:27:26 -08:00
8484628f2f fix more type issues 2023-01-22 22:16:09 -08:00
6565b8baa3 more wip -- most central stuff is first pass done 2023-01-22 16:59:32 -08:00
df1acd7c87 wip -- more fixes 2023-01-21 23:38:53 -08:00
jan
743428d8d7 wip 2023-01-21 21:22:11 -08:00
jan
e482107366 wip 2023-01-19 22:20:16 -08:00
a15131f22a busL -> mpath 2023-01-18 22:32:57 -08:00
81eadffa56 comment out some ipython commands 2023-01-18 18:15:51 -08:00
cb8897b8fe some type updates 2023-01-18 18:14:53 -08:00
83b9af0cc3 Remove support for dose
Since there isn't GDS/OASIS level support for dose, this can be mostly
handled by using arbitrary layers/dtypes directly. Dose scaling isn't
handled as nicely that way, but it corresponds more directly to what
gets written to file.
2023-01-18 18:14:33 -08:00
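A sketch of the suggested replacement, assuming dose buckets are encoded in the (layer, dtype) pair and mapped to actual doses at export time; the values here are placeholders, not code from the repository:

```python
import numpy
from masque import Pattern, Polygon

verts = numpy.array([(0, 0), (1000, 0), (0, 1000)])   # example triangle

pat = Pattern()
pat.shapes += [
    Polygon(vertices=verts, layer=(1, 0)),   # nominal dose
    Polygon(vertices=verts, layer=(1, 1)),   # boosted dose; the dtype marks the bucket
    ]
```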
dfdceefdcd fix some type-related issues 2023-01-18 17:15:14 -08:00
ecb61c9174 get rid of "identifier" 2023-01-18 17:14:35 -08:00
jan
1741cfb755 wip again 2023-01-13 20:33:14 -08:00
jan
58894fa596 delete duplicate utils submodule 2023-01-13 16:08:17 -08:00
5bb67c817f partial work on device libraries 2023-01-12 23:06:39 -08:00
fd045e16d4 various fixes 2023-01-12 23:06:12 -08:00
073ccacee9 remove duplicate __delitem__ 2023-01-12 23:06:02 -08:00
804b662780 improve docs 2023-01-12 23:05:45 -08:00
14e9a7ccbe indirect type spec for Pattern 2023-01-12 23:04:59 -08:00
jan
9bb7ddbb79 Add lib types 2023-01-12 02:23:36 -08:00
jan
273d828d87 bifurcate Device into DeviceRef 2023-01-11 20:19:31 -08:00
b7df6a3f73 add notes about what is hard 2023-01-11 19:00:06 -08:00
b4eee6b84e make error message prettier 2023-01-11 19:00:06 -08:00
jan
42c3a2b1e1 WIP: make libraries and names first-class! 2023-01-11 18:59:57 -08:00
fff20b3da9 Avoid generating a container if only a single port is passed 2023-01-11 18:32:08 -08:00
4bae737630 allow bounds to be passed as args 2023-01-11 18:32:08 -08:00
9891ba9e47 allow passing a single Tool to be used as the default 2023-01-11 18:32:08 -08:00
df320e80cc Add functionality for building paths (single use wires/waveguides/etc) 2023-01-11 18:32:01 -08:00
66 changed files with 5700 additions and 5393 deletions


@@ -3,8 +3,8 @@
 Masque is a Python module for designing lithography masks.
 The general idea is to implement something resembling the GDSII file-format, but
-with some vectorized element types (eg. circles, not just polygons), better support for
-E-beam doses, and the ability to output to multiple formats.
+with some vectorized element types (eg. circles, not just polygons) and the ability
+to output to multiple formats.

 - [Source repository](https://mpxd.net/code/jan/masque)
 - [PyPI](https://pypi.org/project/masque)

@@ -15,7 +15,7 @@ E-beam doses, and the ability to output to multiple formats.
 Requirements:
 * python >= 3.8
 * numpy
-* klamath (used for `gdsii` i/o and library management)
+* klamath (optional, used for `gdsii` i/o)
 * matplotlib (optional, used for `visualization` functions and `text`)
 * ezdxf (optional, used for `dxf` i/o)
 * fatamorgana (optional, used for `oasis` i/o)

@@ -35,9 +35,9 @@ pip3 install git+https://mpxd.net/code/jan/masque.git@release
 ## Translation
 - `Pattern`: OASIS or GDS "Cell", DXF "Block"
-- `SubPattern`: GDS "AREF/SREF", OASIS "Placement"
+- `Ref`: GDS "AREF/SREF", OASIS "Placement"
 - `Shape`: OASIS or GDS "Geometry element", DXF "LWPolyline" or "Polyline"
-- `repetition`: OASIS "repetition". GDS "AREF" is a `SubPattern` combined with a `Grid` repetition.
+- `repetition`: OASIS "repetition". GDS "AREF" is a `Ref` combined with a `Grid` repetition.
 - `Label`: OASIS, GDS, DXF "Text".
 - `annotation`: OASIS or GDS "property"
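For concreteness, the `aref_test` cell in the example diff further down this page builds exactly this correspondence; a trimmed sketch with values copied from that example (the referenced `'ellip_grating'` cell must already exist in the library being written):

```python
from masque import Pattern, Ref
from masque.repetition import Grid

rep = Grid(a_vector=[1e4, 0], b_vector=[0, 1.5e4], a_count=3, b_count=2)

pat = Pattern()                  # becomes an OASIS/GDS "Cell" on write
pat.refs += [
    Ref('ellip_grating', repetition=rep, offset=(1e5, 3e5)),   # written as a GDS "AREF"
    ]
```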
@@ -47,5 +47,13 @@ pip3 install git+https://mpxd.net/code/jan/masque.git@release
 * Better interface for polygon operations (e.g. with `pyclipper`)
     - de-embedding
     - boolean ops
+* Construct polygons from bitmap using `skimage.find_contours`
 * Deal with shape repetitions for dxf, svg
+* Maybe lib.create(bname) -> (name, pat)
+* Schematic:
+    - Simple cell:
+        + Assumes no internal hierarchy, or only other simple hierarchy
+        + Return pattern, refer to it by a well-known name
+    - Parametrized cell:
+        + Take in `lib`
+        + lib.create(), and return a string
+        + Can have pcell hierarchy inside


@ -0,0 +1,29 @@
import numpy
from pyclipper import (
    Pyclipper, PT_CLIP, PT_SUBJECT, CT_UNION, CT_INTERSECTION, PFT_NONZERO,
    scale_to_clipper, scale_from_clipper,
    )

p = Pyclipper()
p.AddPaths([
    [(-10, -10), (-10, 10), (-9, 10), (-9, -10)],
    [(-10, 10), (10, 10), (10, 9), (-10, 9)],
    [(10, 10), (10, -10), (9, -10), (9, 10)],
    [(10, -10), (-10, -10), (-10, -9), (10, -9)],
    ], PT_SUBJECT, closed=True)

#p.Execute2?
#p.Execute?

# The first two attempts below use names (PT_UNION, PT_NONZERO) that are never
# imported and would raise NameError; only the CT_*/PFT_* call is valid.
#p.Execute(PT_UNION, PT_NONZERO, PT_NONZERO)
#p.Execute(CT_UNION, PT_NONZERO, PT_NONZERO)
p.Execute(CT_UNION, PFT_NONZERO, PFT_NONZERO)

p = Pyclipper()
p.AddPaths([
    [(-10, -10), (-10, 10), (-9, 10), (-9, -10)],
    [(-10, 10), (10, 10), (10, 9), (-10, 9)],
    [(10, 10), (10, -10), (9, -10), (9, 10)],
    [(10, -10), (-10, -10), (-10, -9), (10, -9)],
    ], PT_SUBJECT, closed=True)

r = p.Execute2(CT_UNION, PFT_NONZERO, PFT_NONZERO)
#r.Childs

examples/pic2mask.py (new file, 41 lines)

@ -0,0 +1,41 @@
# pip install pillow scikit-image
# or
# sudo apt install python3-pil python3-skimage
from PIL import Image
from skimage.measure import find_contours
from matplotlib import pyplot
import numpy
from masque import Pattern, Polygon
from masque.file.gdsii import writefile
#
# Read the image into a numpy array
#
im = Image.open('./Desktop/Camera/IMG_20220626_091101.jpg')
aa = numpy.array(im.convert(mode='L').getdata()).reshape(im.height, im.width)
threshold = (aa.max() - aa.min()) / 2
#
# Find edge contours and plot them
#
contours = find_contours(aa, threshold)
pyplot.imshow(aa)
for contour in contours:
    pyplot.plot(contour[:, 1], contour[:, 0], linewidth=2)
pyplot.show(block=False)
#
# Create the layout from the contours
#
pat = Pattern()
pat.shapes = [Polygon(vertices=vv) for vv in contours if len(vv) < 1_000]
lib = {}
lib['my_mask_name'] = pat
writefile(lib, 'test_contours.gds', meters_per_unit=1e-9)


@ -1,103 +1,135 @@
from pprint import pprint
from pathlib import Path
import numpy import numpy
from numpy import pi from numpy import pi
import masque import masque
import masque.file.gdsii from masque import Pattern, Ref, Arc, Library
import masque.file.klamath
import masque.file.dxf
import masque.file.oasis
from masque import shapes, Pattern, SubPattern
from masque.repetition import Grid from masque.repetition import Grid
from masque.file import gdsii, dxf, oasis
from pprint import pprint
def main(): def main():
pat = masque.Pattern(name='ellip_grating') lib = Library()
cell_name = 'ellip_grating'
pat = masque.Pattern()
for rmin in numpy.arange(10, 15, 0.5): for rmin in numpy.arange(10, 15, 0.5):
pat.shapes.append(shapes.Arc( pat.shapes.append(Arc(
radii=(rmin, rmin), radii=(rmin, rmin),
width=0.1, width=0.1,
angles=(0*-numpy.pi/4, numpy.pi/4), angles=(0 * -pi/4, pi/4),
annotations={'1': ['blah']}, annotations={'1': ['blah']},
)) ))
pat.scale_by(1000) pat.scale_by(1000)
# pat.visualize() # pat.visualize()
pat2 = pat.copy() lib[cell_name] = pat
pat2.name = 'grating2' print(f'\nAdded {cell_name}:')
pat3 = Pattern('sref_test')
pat3.subpatterns = [
SubPattern(pat, offset=(1e5, 3e5), annotations={'4': ['Hello I am the base subpattern']}),
SubPattern(pat, offset=(2e5, 3e5), rotation=pi/3),
SubPattern(pat, offset=(3e5, 3e5), rotation=pi/2),
SubPattern(pat, offset=(4e5, 3e5), rotation=pi),
SubPattern(pat, offset=(5e5, 3e5), rotation=3*pi/2),
SubPattern(pat, mirrored=(True, False), offset=(1e5, 4e5)),
SubPattern(pat, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3),
SubPattern(pat, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2),
SubPattern(pat, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi),
SubPattern(pat, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2),
SubPattern(pat, mirrored=(False, True), offset=(1e5, 5e5)),
SubPattern(pat, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3),
SubPattern(pat, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2),
SubPattern(pat, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi),
SubPattern(pat, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2),
SubPattern(pat, mirrored=(True, True), offset=(1e5, 6e5)),
SubPattern(pat, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3),
SubPattern(pat, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2),
SubPattern(pat, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi),
SubPattern(pat, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2),
]
pprint(pat3)
pprint(pat3.subpatterns)
pprint(pat.shapes) pprint(pat.shapes)
rep = Grid(a_vector=[1e4, 0], new_name = lib.get_name(cell_name)
b_vector=[0, 1.5e4], lib[new_name] = pat.copy()
a_count=3, print(f'\nAdded a copy of {cell_name} as {new_name}')
b_count=2,)
pat4 = Pattern('aref_test') pat3 = Pattern()
pat4.subpatterns = [ pat3.refs = [
SubPattern(pat, repetition=rep, offset=(1e5, 3e5)), Ref(cell_name, offset=(1e5, 3e5), annotations={'4': ['Hello I am the base Ref']}),
SubPattern(pat, repetition=rep, offset=(2e5, 3e5), rotation=pi/3), Ref(cell_name, offset=(2e5, 3e5), rotation=pi/3),
SubPattern(pat, repetition=rep, offset=(3e5, 3e5), rotation=pi/2), Ref(cell_name, offset=(3e5, 3e5), rotation=pi/2),
SubPattern(pat, repetition=rep, offset=(4e5, 3e5), rotation=pi), Ref(cell_name, offset=(4e5, 3e5), rotation=pi),
SubPattern(pat, repetition=rep, offset=(5e5, 3e5), rotation=3*pi/2), Ref(cell_name, offset=(5e5, 3e5), rotation=3*pi/2),
SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(1e5, 4e5)), Ref(cell_name, mirrored=(True, False), offset=(1e5, 4e5)),
SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3), Ref(cell_name, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3),
SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2), Ref(cell_name, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2),
SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi), Ref(cell_name, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi),
SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2), Ref(cell_name, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2),
SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(1e5, 5e5)), Ref(cell_name, mirrored=(False, True), offset=(1e5, 5e5)),
SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3), Ref(cell_name, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3),
SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2), Ref(cell_name, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2),
SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi), Ref(cell_name, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi),
SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2), Ref(cell_name, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2),
SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(1e5, 6e5)), Ref(cell_name, mirrored=(True, True), offset=(1e5, 6e5)),
SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3), Ref(cell_name, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3),
SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2), Ref(cell_name, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2),
SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi), Ref(cell_name, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi),
SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2), Ref(cell_name, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2),
] ]
folder = 'layouts/' lib['sref_test'] = pat3
masque.file.klamath.writefile((pat, pat2, pat3, pat4), folder + 'rep.gds.gz', 1e-9, 1e-3) print('\nAdded sref_test:')
pprint(pat3)
pprint(pat3.refs)
cells = list(masque.file.klamath.readfile(folder + 'rep.gds.gz')[0].values()) rep = Grid(
masque.file.klamath.writefile(cells, folder + 'rerep.gds.gz', 1e-9, 1e-3) a_vector=[1e4, 0],
b_vector=[0, 1.5e4],
a_count=3,
b_count=2,
)
pat4 = Pattern()
pat4.refs = [
Ref(cell_name, repetition=rep, offset=(1e5, 3e5)),
Ref(cell_name, repetition=rep, offset=(2e5, 3e5), rotation=pi/3),
Ref(cell_name, repetition=rep, offset=(3e5, 3e5), rotation=pi/2),
Ref(cell_name, repetition=rep, offset=(4e5, 3e5), rotation=pi),
Ref(cell_name, repetition=rep, offset=(5e5, 3e5), rotation=3*pi/2),
Ref(cell_name, repetition=rep, mirrored=(True, False), offset=(1e5, 4e5)),
Ref(cell_name, repetition=rep, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3),
Ref(cell_name, repetition=rep, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2),
Ref(cell_name, repetition=rep, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi),
Ref(cell_name, repetition=rep, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2),
Ref(cell_name, repetition=rep, mirrored=(False, True), offset=(1e5, 5e5)),
Ref(cell_name, repetition=rep, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3),
Ref(cell_name, repetition=rep, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2),
Ref(cell_name, repetition=rep, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi),
Ref(cell_name, repetition=rep, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2),
Ref(cell_name, repetition=rep, mirrored=(True, True), offset=(1e5, 6e5)),
Ref(cell_name, repetition=rep, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3),
Ref(cell_name, repetition=rep, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2),
Ref(cell_name, repetition=rep, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi),
Ref(cell_name, repetition=rep, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2),
]
masque.file.dxf.writefile(pat4, folder + 'rep.dxf.gz') lib['aref_test'] = pat4
dxf, info = masque.file.dxf.readfile(folder + 'rep.dxf.gz') print('\nAdded aref_test')
masque.file.dxf.writefile(dxf, folder + 'rerep.dxf.gz')
folder = Path('./layouts/')
print(f'...writing files to {folder}...')
gds1 = folder / 'rep.gds.gz'
gds2 = folder / 'rerep.gds.gz'
print(f'Initial write to {gds1}')
gdsii.writefile(lib, gds1, 1e-9, 1e-3)
print(f'Read back and rewrite to {gds2}')
readback_lib, _info = gdsii.readfile(gds1)
gdsii.writefile(readback_lib, gds2, 1e-9, 1e-3)
dxf1 = folder / 'rep.dxf.gz'
dxf2 = folder / 'rerep.dxf.gz'
print(f'Write aref_test to {dxf1}')
dxf.writefile(lib, 'aref_test', dxf1)
print(f'Read back and rewrite to {dxf2}')
dxf_lib, _info = dxf.readfile(dxf1)
print(Library(dxf_lib))
dxf.writefile(dxf_lib, 'Model', dxf2)
layer_map = {'base': (0,0), 'mylabel': (1,2)} layer_map = {'base': (0,0), 'mylabel': (1,2)}
masque.file.oasis.writefile((pat, pat2, pat3, pat4), folder + 'rep.oas.gz', 1000, layer_map=layer_map) oas1 = folder / 'rep.oas'
oas, info = masque.file.oasis.readfile(folder + 'rep.oas.gz') oas2 = folder / 'rerep.oas'
masque.file.oasis.writefile(list(oas.values()), folder + 'rerep.oas.gz', 1000, layer_map=layer_map) print(f'Write lib to {oas1}')
print(info) oasis.writefile(lib, oas1, 1000, layer_map=layer_map)
print(f'Read back and rewrite to {oas2}')
oas_lib, oas_info = oasis.readfile(oas1)
oasis.writefile(oas_lib, oas2, 1000, layer_map=layer_map)
print('OASIS info:')
pprint(oas_info)
if __name__ == '__main__': if __name__ == '__main__':


@ -0,0 +1 @@
TODO write tutorial readme


@ -1,21 +1,21 @@
from typing import Tuple, Sequence from typing import Sequence
import numpy import numpy
from numpy import pi from numpy import pi
from masque import layer_t, Pattern, SubPattern, Label from masque import (
from masque.shapes import Circle, Arc, Polygon layer_t, Pattern, Label, Port,
from masque.builder import Device, Port Circle, Arc, Polygon,
from masque.library import Library, DeviceLibrary )
import masque.file.gdsii import masque.file.gdsii
# Note that masque units are arbitrary, and are only given # Note that masque units are arbitrary, and are only given
# physical significance when writing to a file. # physical significance when writing to a file.
GDS_OPTS = { GDS_OPTS = dict(
'meters_per_unit': 1e-9, # GDS database unit, 1 nanometer meters_per_unit = 1e-9, # GDS database unit, 1 nanometer
'logical_units_per_unit': 1e-3, # GDS display unit, 1 micron logical_units_per_unit = 1e-3, # GDS display unit, 1 micron
} )
def hole( def hole(
@ -30,10 +30,10 @@ def hole(
layer: Layer to draw the circle on. layer: Layer to draw the circle on.
Returns: Returns:
Pattern, named `'hole'` Pattern containing a circle.
""" """
pat = Pattern('hole', shapes=[ pat = Pattern(shapes=[
Circle(radius=radius, offset=(0, 0), layer=layer) Circle(radius=radius, offset=(0, 0), layer=layer),
]) ])
return pat return pat
@ -50,7 +50,7 @@ def triangle(
layer: Layer to draw the circle on. layer: Layer to draw the circle on.
Returns: Returns:
Pattern, named `'triangle'` Pattern containing a triangle
""" """
vertices = numpy.array([ vertices = numpy.array([
(numpy.cos( pi / 2), numpy.sin( pi / 2)), (numpy.cos( pi / 2), numpy.sin( pi / 2)),
@ -58,7 +58,7 @@ def triangle(
(numpy.cos( - pi / 6), numpy.sin( - pi / 6)), (numpy.cos( - pi / 6), numpy.sin( - pi / 6)),
]) * radius ]) * radius
pat = Pattern('triangle', shapes=[ pat = Pattern(shapes=[
Polygon(offset=(0, 0), layer=layer, vertices=vertices), Polygon(offset=(0, 0), layer=layer, vertices=vertices),
]) ])
return pat return pat
@ -78,37 +78,38 @@ def smile(
secondary_layer: Layer to draw eyes and smile on. secondary_layer: Layer to draw eyes and smile on.
Returns: Returns:
Pattern, named `'smile'` Pattern containing a smiley face
""" """
# Make an empty pattern # Make an empty pattern
pat = Pattern('smile') pat = Pattern()
# Add all the shapes we want # Add all the shapes we want
pat.shapes += [ pat.shapes += [
Circle(radius=radius, offset=(0, 0), layer=layer), # Outer circle Circle(radius=radius, offset=(0, 0), layer=layer), # Outer circle
Circle(radius=radius / 10, offset=(radius / 3, radius / 3), layer=secondary_layer), Circle(radius=radius / 10, offset=(radius / 3, radius / 3), layer=secondary_layer),
Circle(radius=radius / 10, offset=(-radius / 3, radius / 3), layer=secondary_layer), Circle(radius=radius / 10, offset=(-radius / 3, radius / 3), layer=secondary_layer),
Arc(radii=(radius * 2 / 3, radius * 2 / 3), # Underlying ellipse radii Arc(
radii=(radius * 2 / 3, radius * 2 / 3), # Underlying ellipse radii
angles=(7 / 6 * pi, 11 / 6 * pi), # Angles limiting the arc angles=(7 / 6 * pi, 11 / 6 * pi), # Angles limiting the arc
width=radius / 10, width=radius / 10,
offset=(0, 0), offset=(0, 0),
layer=secondary_layer), layer=secondary_layer,
),
] ]
return pat return pat
def main() -> None: def main() -> None:
hole_pat = hole(1000) lib = {}
smile_pat = smile(1000)
tri_pat = triangle(1000)
units_per_meter = 1e-9 lib['hole'] = hole(1000)
units_per_display_unit = 1e-3 lib['smile'] = smile(1000)
lib['triangle'] = triangle(1000)
masque.file.gdsii.writefile([hole_pat, tri_pat, smile_pat], 'basic_shapes.gds', **GDS_OPTS) masque.file.gdsii.writefile(lib, 'basic_shapes.gds', **GDS_OPTS)
smile_pat.visualize() lib['triangle'].visualize()
if __name__ == '__main__': if __name__ == '__main__':


@ -1,12 +1,15 @@
from typing import Tuple, Sequence, Dict # TODO update tutorials
from typing import Sequence, Mapping
import numpy import numpy
from numpy import pi from numpy import pi
from masque import layer_t, Pattern, SubPattern, Label from masque import (
from masque.shapes import Polygon layer_t, Pattern, Ref, Label, Builder, Port, Polygon,
from masque.builder import Device, Port, port_utils Library, ILibraryView,
from masque.file.gdsii import writefile )
from masque.utils import ports2data
from masque.file.gdsii import writefile, check_valid_names
import pcgen import pcgen
import basic_shapes import basic_shapes
@ -17,40 +20,41 @@ LATTICE_CONSTANT = 512
RADIUS = LATTICE_CONSTANT / 2 * 0.75 RADIUS = LATTICE_CONSTANT / 2 * 0.75
def dev2pat(dev: Device) -> Pattern: def ports_to_data(pat: Pattern) -> Pattern:
""" """
Bake port information into the device. Bake port information into the pattern.
This places a label at each port location on layer (3, 0) with text content This places a label at each port location on layer (3, 0) with text content
'name:ptype angle_deg' 'name:ptype angle_deg'
""" """
return port_utils.dev2pat(dev, layer=(3, 0)) return ports2data.ports_to_data(pat, layer=(3, 0))
def pat2dev(pat: Pattern) -> Device: def data_to_ports(lib: Mapping[str, Pattern], name: str, pat: Pattern) -> Pattern:
""" """
Scans the Pattern to determine port locations. Same format as `dev2pat` Scans the Pattern to determine port locations. Same port format as `ports_to_data`
""" """
return port_utils.pat2dev(pat, layers=[(3, 0)]) return ports2data.data_to_ports(layers=[(3, 0)], library=lib, pattern=pat, name=name)
def perturbed_l3( def perturbed_l3(
lattice_constant: float, lattice_constant: float,
hole: Pattern, hole: str,
trench_dose: float = 1.0, hole_lib: Mapping[str, Pattern],
trench_layer: layer_t = (1, 0), trench_layer: layer_t = (1, 0),
shifts_a: Sequence[float] = (0.15, 0, 0.075), shifts_a: Sequence[float] = (0.15, 0, 0.075),
shifts_r: Sequence[float] = (1.0, 1.0, 1.0), shifts_r: Sequence[float] = (1.0, 1.0, 1.0),
xy_size: Tuple[int, int] = (10, 10), xy_size: tuple[int, int] = (10, 10),
perturbed_radius: float = 1.1, perturbed_radius: float = 1.1,
trench_width: float = 1200, trench_width: float = 1200,
) -> Device: ) -> Pattern:
""" """
Generate a `Device` representing a perturbed L3 cavity. Generate a `Pattern` representing a perturbed L3 cavity.
Args: Args:
lattice_constant: Distance between nearest neighbor holes lattice_constant: Distance between nearest neighbor holes
hole: `Pattern` object containing a single hole hole: name of a `Pattern` containing a single hole
trench_dose: Dose for the trenches. Default 1.0. (Hole dose is 1.0.) hole_lib: Library which contains the `Pattern` object for hole.
Necessary because we need to know how big it is...
trench_layer: Layer for the trenches, default `(1, 0)`. trench_layer: Layer for the trenches, default `(1, 0)`.
shifts_a: passed to `pcgen.l3_shift`; specifies lattice constant shifts_a: passed to `pcgen.l3_shift`; specifies lattice constant
(1 - multiplicative factor) for shifting holes adjacent to (1 - multiplicative factor) for shifting holes adjacent to
@ -66,8 +70,10 @@ def perturbed_l3(
trench width: Width of the undercut trenches. Default 1200. trench width: Width of the undercut trenches. Default 1200.
Returns: Returns:
`Device` object representing the L3 design. `Pattern` object representing the L3 design.
""" """
print('Generating perturbed L3...')
# Get hole positions and radii # Get hole positions and radii
xyr = pcgen.l3_shift_perturbed_defect(mirror_dims=xy_size, xyr = pcgen.l3_shift_perturbed_defect(mirror_dims=xy_size,
perturbed_radius=perturbed_radius, perturbed_radius=perturbed_radius,
@ -75,188 +81,197 @@ def perturbed_l3(
shifts_r=shifts_r) shifts_r=shifts_r)
# Build L3 cavity, using references to the provided hole pattern # Build L3 cavity, using references to the provided hole pattern
pat = Pattern(f'L3p-a{lattice_constant:g}rp{perturbed_radius:g}') pat = Pattern()
pat.subpatterns += [ pat.refs += [
SubPattern(hole, scale=r, Ref(hole, scale=r, offset=(lattice_constant * x,
offset=(lattice_constant * x,
lattice_constant * y)) lattice_constant * y))
for x, y, r in xyr] for x, y, r in xyr]
# Add rectangular undercut aids # Add rectangular undercut aids
min_xy, max_xy = pat.get_bounds_nonempty() min_xy, max_xy = pat.get_bounds_nonempty(hole_lib)
trench_dx = max_xy[0] - min_xy[0] trench_dx = max_xy[0] - min_xy[0]
pat.shapes += [ pat.shapes += [
Polygon.rect(ymin=max_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width, Polygon.rect(ymin=max_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width, layer=trench_layer),
layer=trench_layer, dose=trench_dose), Polygon.rect(ymax=min_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width, layer=trench_layer),
Polygon.rect(ymax=min_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width,
layer=trench_layer, dose=trench_dose),
] ]
# Ports are at outer extents of the device (with y=0) # Ports are at outer extents of the device (with y=0)
extent = lattice_constant * xy_size[0] extent = lattice_constant * xy_size[0]
ports = { pat.ports = dict(
'input': Port((-extent, 0), rotation=0, ptype='pcwg'), input=Port((-extent, 0), rotation=0, ptype='pcwg'),
'output': Port((extent, 0), rotation=pi, ptype='pcwg'), output=Port((extent, 0), rotation=pi, ptype='pcwg'),
} )
return Device(pat, ports) ports_to_data(pat)
return pat
def waveguide( def waveguide(
lattice_constant: float, lattice_constant: float,
hole: Pattern, hole: str,
length: int, length: int,
mirror_periods: int, mirror_periods: int,
) -> Device: ) -> Pattern:
""" """
Generate a `Device` representing a photonic crystal line-defect waveguide. Generate a `Pattern` representing a photonic crystal line-defect waveguide.
Args: Args:
lattice_constant: Distance between nearest neighbor holes lattice_constant: Distance between nearest neighbor holes
hole: `Pattern` object containing a single hole hole: name of a `Pattern` containing a single hole
length: Distance (number of mirror periods) between the input and output ports. length: Distance (number of mirror periods) between the input and output ports.
Ports are placed at lattice sites. Ports are placed at lattice sites.
mirror_periods: Number of hole rows on each side of the line defect mirror_periods: Number of hole rows on each side of the line defect
Returns: Returns:
`Device` object representing the waveguide. `Pattern` object representing the waveguide.
""" """
# Generate hole locations # Generate hole locations
xy = pcgen.waveguide(length=length, num_mirror=mirror_periods) xy = pcgen.waveguide(length=length, num_mirror=mirror_periods)
# Build the pattern # Build the pattern
pat = Pattern(f'_wg-a{lattice_constant:g}l{length}') pat = Pattern()
pat.subpatterns += [SubPattern(hole, offset=(lattice_constant * x, pat.refs += [
Ref(hole, offset=(lattice_constant * x,
lattice_constant * y)) lattice_constant * y))
for x, y in xy] for x, y in xy]
# Ports are at outer edges, with y=0 # Ports are at outer edges, with y=0
extent = lattice_constant * length / 2 extent = lattice_constant * length / 2
ports = { pat.ports = dict(
'left': Port((-extent, 0), rotation=0, ptype='pcwg'), left=Port((-extent, 0), rotation=0, ptype='pcwg'),
'right': Port((extent, 0), rotation=pi, ptype='pcwg'), right=Port((extent, 0), rotation=pi, ptype='pcwg'),
} )
return Device(pat, ports)
ports_to_data(pat)
return pat
def bend( def bend(
lattice_constant: float, lattice_constant: float,
hole: Pattern, hole: str,
mirror_periods: int, mirror_periods: int,
) -> Device: ) -> Pattern:
""" """
Generate a `Device` representing a 60-degree counterclockwise bend in a photonic crystal Generate a `Pattern` representing a 60-degree counterclockwise bend in a photonic crystal
line-defect waveguide. line-defect waveguide.
Args: Args:
lattice_constant: Distance between nearest neighbor holes lattice_constant: Distance between nearest neighbor holes
hole: `Pattern` object containing a single hole hole: name of a `Pattern` containing a single hole
mirror_periods: Minimum number of mirror periods on each side of the line defect. mirror_periods: Minimum number of mirror periods on each side of the line defect.
Returns: Returns:
`Device` object representing the waveguide bend. `Pattern` object representing the waveguide bend.
Ports are named 'left' (input) and 'right' (output). Ports are named 'left' (input) and 'right' (output).
""" """
# Generate hole locations # Generate hole locations
xy = pcgen.wgbend(num_mirror=mirror_periods) xy = pcgen.wgbend(num_mirror=mirror_periods)
# Build the pattern # Build the pattern
pat= Pattern(f'_wgbend-a{lattice_constant:g}l{mirror_periods}') pat= Pattern()
pat.subpatterns += [ pat.refs += [
SubPattern(hole, offset=(lattice_constant * x, Ref(hole, offset=(lattice_constant * x,
lattice_constant * y)) lattice_constant * y))
for x, y in xy] for x, y in xy]
# Figure out port locations. # Figure out port locations.
extent = lattice_constant * mirror_periods extent = lattice_constant * mirror_periods
ports = { pat.ports = dict(
'left': Port((-extent, 0), rotation=0, ptype='pcwg'), left=Port((-extent, 0), rotation=0, ptype='pcwg'),
'right': Port((extent / 2, right=Port((extent / 2,
extent * numpy.sqrt(3) / 2), extent * numpy.sqrt(3) / 2),
rotation=pi * 4 / 3, ptype='pcwg'), rotation=pi * 4 / 3, ptype='pcwg'),
} )
return Device(pat, ports) ports_to_data(pat)
return pat
def y_splitter( def y_splitter(
lattice_constant: float, lattice_constant: float,
hole: Pattern, hole: str,
mirror_periods: int, mirror_periods: int,
) -> Device: ) -> Pattern:
""" """
Generate a `Device` representing a photonic crystal line-defect waveguide y-splitter. Generate a `Pattern` representing a photonic crystal line-defect waveguide y-splitter.
Args: Args:
lattice_constant: Distance between nearest neighbor holes lattice_constant: Distance between nearest neighbor holes
hole: `Pattern` object containing a single hole hole: name of a `Pattern` containing a single hole
mirror_periods: Minimum number of mirror periods on each side of the line defect. mirror_periods: Minimum number of mirror periods on each side of the line defect.
Returns: Returns:
`Device` object representing the y-splitter. `Pattern` object representing the y-splitter.
Ports are named 'in', 'top', and 'bottom'. Ports are named 'in', 'top', and 'bottom'.
""" """
# Generate hole locations # Generate hole locations
xy = pcgen.y_splitter(num_mirror=mirror_periods) xy = pcgen.y_splitter(num_mirror=mirror_periods)
# Build pattern # Build pattern
pat = Pattern(f'_wgsplit_half-a{lattice_constant:g}l{mirror_periods}') pat = Pattern()
pat.subpatterns += [ pat.refs += [
SubPattern(hole, offset=(lattice_constant * x, Ref(hole, offset=(lattice_constant * x,
lattice_constant * y)) lattice_constant * y))
for x, y in xy] for x, y in xy]
# Determine port locations # Determine port locations
extent = lattice_constant * mirror_periods extent = lattice_constant * mirror_periods
ports = { pat.ports = {
'in': Port((-extent, 0), rotation=0, ptype='pcwg'), 'in': Port((-extent, 0), rotation=0, ptype='pcwg'),
'top': Port((extent / 2, extent * numpy.sqrt(3) / 2), rotation=pi * 4 / 3, ptype='pcwg'), 'top': Port((extent / 2, extent * numpy.sqrt(3) / 2), rotation=pi * 4 / 3, ptype='pcwg'),
'bot': Port((extent / 2, -extent * numpy.sqrt(3) / 2), rotation=pi * 2 / 3, ptype='pcwg'), 'bot': Port((extent / 2, -extent * numpy.sqrt(3) / 2), rotation=pi * 2 / 3, ptype='pcwg'),
} }
return Device(pat, ports)
ports_to_data(pat)
return pat
def main(interactive: bool = True): def main(interactive: bool = True) -> None:
# Generate some basic hole patterns # Generate some basic hole patterns
smile = basic_shapes.smile(RADIUS) shape_lib = {
hole = basic_shapes.hole(RADIUS) 'smile': basic_shapes.smile(RADIUS),
'hole': basic_shapes.hole(RADIUS),
}
# Build some devices # Build some devices
a = LATTICE_CONSTANT a = LATTICE_CONSTANT
wg10 = waveguide(lattice_constant=a, hole=hole, length=10, mirror_periods=5).rename('wg10')
wg05 = waveguide(lattice_constant=a, hole=hole, length=5, mirror_periods=5).rename('wg05')
wg28 = waveguide(lattice_constant=a, hole=hole, length=28, mirror_periods=5).rename('wg28')
bend0 = bend(lattice_constant=a, hole=hole, mirror_periods=5).rename('bend0')
ysplit = y_splitter(lattice_constant=a, hole=hole, mirror_periods=5).rename('ysplit')
l3cav = perturbed_l3(lattice_constant=a, hole=smile, xy_size=(4, 10)).rename('l3cav') # uses smile :)
# Autogenerate port labels so that GDS will also contain port data devices = {}
for device in [wg10, wg05, wg28, l3cav, ysplit, bend0]: devices['wg05'] = waveguide(lattice_constant=a, hole='hole', length=5, mirror_periods=5)
dev2pat(device) devices['wg10'] = waveguide(lattice_constant=a, hole='hole', length=10, mirror_periods=5)
devices['wg28'] = waveguide(lattice_constant=a, hole='hole', length=28, mirror_periods=5)
devices['wg90'] = waveguide(lattice_constant=a, hole='hole', length=90, mirror_periods=5)
devices['bend0'] = bend(lattice_constant=a, hole='hole', mirror_periods=5)
devices['ysplit'] = y_splitter(lattice_constant=a, hole='hole', mirror_periods=5)
devices['l3cav'] = perturbed_l3(lattice_constant=a, hole='smile', hole_lib=shape_lib, xy_size=(4, 10)) # uses smile :)
# Turn our dict of devices into a Library -- useful for getting abstracts
lib = Library(devices)
abv = lib.abstract_view() # lets us use abv[cell] instead of lib.abstract(cell)
# #
# Build a circuit # Build a circuit
# #
circ = Device(name='my_circuit', ports={}) circ = Builder(library=lib)
# Start by placing a waveguide. Call its ports "in" and "signal". # Start by placing a waveguide. Call its ports "in" and "signal".
circ.place(wg10, offset=(0, 0), port_map={'left': 'in', 'right': 'signal'}) circ.place(abv['wg10'], offset=(0, 0), port_map={'left': 'in', 'right': 'signal'})
# Extend the signal path by attaching the "left" port of a waveguide. # Extend the signal path by attaching the "left" port of a waveguide.
# Since there is only one other port ("right") on the waveguide we # Since there is only one other port ("right") on the waveguide we
# are attaching (wg10), it automatically inherits the name "signal". # are attaching (wg10), it automatically inherits the name "signal".
circ.plug(wg10, {'signal': 'left'}) circ.plug(abv['wg10'], {'signal': 'left'})
# Attach a y-splitter to the signal path. # Attach a y-splitter to the signal path.
# Since the y-splitter has 3 ports total, we can't auto-inherit the # Since the y-splitter has 3 ports total, we can't auto-inherit the
# port name, so we have to specify what we want to name the unattached # port name, so we have to specify what we want to name the unattached
# ports. We can call them "signal1" and "signal2". # ports. We can call them "signal1" and "signal2".
circ.plug(ysplit, {'signal': 'in'}, {'top': 'signal1', 'bot': 'signal2'}) circ.plug(abv['ysplit'], {'signal': 'in'}, {'top': 'signal1', 'bot': 'signal2'})
# Add a waveguide to both signal ports, inheriting their names. # Add a waveguide to both signal ports, inheriting their names.
circ.plug(wg05, {'signal1': 'left'}) circ.plug(abv['wg05'], {'signal1': 'left'})
circ.plug(wg05, {'signal2': 'left'}) circ.plug(abv['wg05'], {'signal2': 'left'})
# Add a bend to both ports. # Add a bend to both ports.
# Our bend's ports "left" and "right" refer to the original counterclockwise # Our bend's ports "left" and "right" refer to the original counterclockwise
@ -265,22 +280,22 @@ def main(interactive: bool = True):
# to "signal2" to bend counterclockwise. # to "signal2" to bend counterclockwise.
# We could also use `mirrored=(True, False)` to mirror one of the devices # We could also use `mirrored=(True, False)` to mirror one of the devices
# and then use same device port on both paths. # and then use same device port on both paths.
circ.plug(bend0, {'signal1': 'right'}) circ.plug(abv['bend0'], {'signal1': 'right'})
circ.plug(bend0, {'signal2': 'left'}) circ.plug(abv['bend0'], {'signal2': 'left'})
# We add some waveguides and a cavity to "signal1". # We add some waveguides and a cavity to "signal1".
circ.plug(wg10, {'signal1': 'left'}) circ.plug(abv['wg10'], {'signal1': 'left'})
circ.plug(l3cav, {'signal1': 'input'}) circ.plug(abv['l3cav'], {'signal1': 'input'})
circ.plug(wg10, {'signal1': 'left'}) circ.plug(abv['wg10'], {'signal1': 'left'})
# "signal2" just gets a single of equivalent length # "signal2" just gets a single of equivalent length
circ.plug(wg28, {'signal2': 'left'}) circ.plug(abv['wg28'], {'signal2': 'left'})
# Now we bend both waveguides back towards each other # Now we bend both waveguides back towards each other
circ.plug(bend0, {'signal1': 'right'}) circ.plug(abv['bend0'], {'signal1': 'right'})
circ.plug(bend0, {'signal2': 'left'}) circ.plug(abv['bend0'], {'signal2': 'left'})
circ.plug(wg05, {'signal1': 'left'}) circ.plug(abv['wg05'], {'signal1': 'left'})
circ.plug(wg05, {'signal2': 'left'}) circ.plug(abv['wg05'], {'signal2': 'left'})
# To join the waveguides, we attach a second y-junction. # To join the waveguides, we attach a second y-junction.
# We plug "signal1" into the "bot" port, and "signal2" into the "top" port. # We plug "signal1" into the "bot" port, and "signal2" into the "top" port.
@ -288,23 +303,37 @@ def main(interactive: bool = True):
# This operation would raise an exception if the ports did not line up # This operation would raise an exception if the ports did not line up
# correctly (i.e. they required different rotations or translations of the # correctly (i.e. they required different rotations or translations of the
# y-junction device). # y-junction device).
circ.plug(ysplit, {'signal1': 'bot', 'signal2': 'top'}, {'in': 'signal_out'}) circ.plug(abv['ysplit'], {'signal1': 'bot', 'signal2': 'top'}, {'in': 'signal_out'})
# Finally, add some more waveguide to "signal_out". # Finally, add some more waveguide to "signal_out".
circ.plug(wg10, {'signal_out': 'left'}) circ.plug(abv['wg10'], {'signal_out': 'left'})
# We can visualize the design. Usually it's easier to just view the GDS.
if interactive:
print('Visualizing... this step may be slow')
circ.pattern.visualize()
# We can also add text labels for our circuit's ports. # We can also add text labels for our circuit's ports.
# They will appear at the uppermost hierarchy level, while the individual # They will appear at the uppermost hierarchy level, while the individual
# device ports will appear further down, in their respective cells. # device ports will appear further down, in their respective cells.
dev2pat(circ) ports_to_data(circ.pattern)
# Write out to GDS # Add the pattern into our library
writefile(circ.pattern, 'circuit.gds', **GDS_OPTS) lib['my_circuit'] = circ.pattern
# Check if we forgot to include any patterns... ooops!
if dangling := lib.dangling_refs():
print('Warning: The following patterns are referenced, but not present in the'
f' library! {dangling}')
print('We\'ll solve this by merging in shape_lib, which contains those shapes...')
lib.add(shape_lib)
assert not lib.dangling_refs()
# We can visualize the design. Usually it's easier to just view the GDS.
if interactive:
print('Visualizing... this step may be slow')
circ.pattern.visualize(lib)
#Write out to GDS, only keeping patterns referenced by our circuit (including itself)
subtree = lib.subtree('my_circuit') # don't include wg90, which we don't use
check_valid_names(subtree.keys())
writefile(subtree, 'circuit.gds', **GDS_OPTS)
if __name__ == '__main__': if __name__ == '__main__':


@ -1,81 +1,81 @@
from typing import Tuple, Sequence, Callable from typing import Sequence, Callable
from pprint import pformat from pprint import pformat
import numpy import numpy
from numpy import pi from numpy import pi
from masque.builder import Device from masque import Pattern, Builder, LazyLibrary
from masque.library import Library, LibDeviceLibrary
from masque.file.gdsii import writefile, load_libraryfile from masque.file.gdsii import writefile, load_libraryfile
import pcgen import pcgen
import basic_shapes import basic_shapes
import devices import devices
from devices import pat2dev, dev2pat from devices import ports_to_data, data_to_ports
from basic_shapes import GDS_OPTS from basic_shapes import GDS_OPTS
def main() -> None: def main() -> None:
# Define a `Library`-backed `DeviceLibrary`, which provides lazy evaluation # Define a `LazyLibrary`, which provides lazy evaluation for generating
# for device generation code and lazy-loading of GDS contents. # patterns and lazy-loading of GDS contents.
device_lib = LibDeviceLibrary() lib = LazyLibrary()
# #
# Load some devices from a GDS file # Load some devices from a GDS file
# #
# Scan circuit.gds and prepare to lazy-load its contents # Scan circuit.gds and prepare to lazy-load its contents
pattern_lib, _properties = load_libraryfile('circuit.gds', tag='mycirc01') gds_lib, _properties = load_libraryfile('circuit.gds', postprocess=data_to_ports)
# Add it into the device library by providing a way to read port info # Add it into the device library by providing a way to read port info
# This maintains the lazy evaluation from above, so no patterns # This maintains the lazy evaluation from above, so no patterns
# are actually read yet. # are actually read yet.
device_lib.add_library(pattern_lib, pat2dev=pat2dev) lib.add(gds_lib)
print('Devices loaded from GDS into library:\n' + pformat(list(device_lib.keys())))
print('Patterns loaded from GDS into library:\n' + pformat(list(lib.keys())))
# #
# Add some new devices to the library, this time from python code rather than GDS # Add some new devices to the library, this time from python code rather than GDS
# #
a = devices.LATTICE_CONSTANT lib['triangle'] = lambda: basic_shapes.triangle(devices.RADIUS)
tri = basic_shapes.triangle(devices.RADIUS) opts = dict(
lattice_constant = devices.LATTICE_CONSTANT,
# Convenience function for adding devices hole = 'triangle',
# This is roughly equivalent to )
# `device_lib[name] = lambda: dev2pat(fn())`
# but it also guarantees that the resulting pattern is named `name`.
def add(name: str, fn: Callable[[], Device]) -> None:
device_lib.add_device(name=name, fn=fn, dev2pat=dev2pat)
# Triangle-based variants. These are defined here, but they won't run until they're # Triangle-based variants. These are defined here, but they won't run until they're
# retrieved from the library. # retrieved from the library.
add('tri_wg10', lambda: devices.waveguide(lattice_constant=a, hole=tri, length=10, mirror_periods=5)) lib['tri_wg10'] = lambda: devices.waveguide(length=10, mirror_periods=5, **opts)
add('tri_wg05', lambda: devices.waveguide(lattice_constant=a, hole=tri, length=5, mirror_periods=5)) lib['tri_wg05'] = lambda: devices.waveguide(length=5, mirror_periods=5, **opts)
add('tri_wg28', lambda: devices.waveguide(lattice_constant=a, hole=tri, length=28, mirror_periods=5)) lib['tri_wg28'] = lambda: devices.waveguide(length=28, mirror_periods=5, **opts)
add('tri_bend0', lambda: devices.bend(lattice_constant=a, hole=tri, mirror_periods=5)) lib['tri_bend0'] = lambda: devices.bend(mirror_periods=5, **opts)
add('tri_ysplit', lambda: devices.y_splitter(lattice_constant=a, hole=tri, mirror_periods=5)) lib['tri_ysplit'] = lambda: devices.y_splitter(mirror_periods=5, **opts)
add('tri_l3cav', lambda: devices.perturbed_l3(lattice_constant=a, hole=tri, xy_size=(4, 10))) lib['tri_l3cav'] = lambda: devices.perturbed_l3(xy_size=(4, 10), **opts, hole_lib=lib)
# #
# Build a mixed waveguide with an L3 cavity in the middle # Build a mixed waveguide with an L3 cavity in the middle
# #
# Immediately start building from an instance of the L3 cavity # Immediately start building from an instance of the L3 cavity
circ2 = device_lib['tri_l3cav'].build('mixed_wg_cav') circ2 = Builder(library=lib, ports='tri_l3cav')
print(device_lib['wg10'].ports) # First way to get abstracts is `lib.abstract(name)`
circ2.plug(device_lib['wg10'], {'input': 'right'}) circ2.plug(lib.abstract('wg10'), {'input': 'right'})
circ2.plug(device_lib['wg10'], {'output': 'left'})
circ2.plug(device_lib['tri_wg10'], {'input': 'right'}) # Second way to get abstracts is to use an AbstractView
circ2.plug(device_lib['tri_wg10'], {'output': 'left'}) abstracts = lib.abstract_view()
circ2.plug(abstracts['wg10'], {'output': 'left'})
# Third way to specify an abstract works by automatically getting
# it from the library already within the Builder object:
# Just pass the pattern name!
circ2.plug('tri_wg10', {'input': 'right'})
circ2.plug('tri_wg10', {'output': 'left'})
# Add the circuit to the device library. # Add the circuit to the device library.
# It has already been generated, so we can use `set_const` as a shorthand for # It has already been generated, so we can use `set_const` as a shorthand for
# `device_lib['mixed_wg_cav'] = lambda: circ2` # `lib['mixed_wg_cav'] = lambda: circ2.pattern`
device_lib.set_const(circ2) lib.set_const('mixed_wg_cav', circ2.pattern)
# #
@ -83,29 +83,26 @@ def main() -> None:
# #
# We'll be designing against an existing device's interface... # We'll be designing against an existing device's interface...
circ3 = circ2.as_interface('loop_segment') circ3 = Builder.interface(source=circ2)
# ... that lets us continue from where we left off.
circ3.plug(device_lib['tri_bend0'], {'input': 'right'})
circ3.plug(device_lib['tri_bend0'], {'input': 'left'}, mirrored=(True, False)) # mirror since no tri y-symmetry
circ3.plug(device_lib['tri_bend0'], {'input': 'right'})
circ3.plug(device_lib['bend0'], {'output': 'left'})
circ3.plug(device_lib['bend0'], {'output': 'left'})
circ3.plug(device_lib['bend0'], {'output': 'left'})
circ3.plug(device_lib['tri_wg10'], {'input': 'right'})
circ3.plug(device_lib['tri_wg28'], {'input': 'right'})
circ3.plug(device_lib['tri_wg10'], {'input': 'right', 'output': 'left'})
device_lib.set_const(circ3) # ... that lets us continue from where we left off.
circ3.plug('tri_bend0', {'input': 'right'})
circ3.plug('tri_bend0', {'input': 'left'}, mirrored=(True, False)) # mirror since no tri y-symmetry
circ3.plug('tri_bend0', {'input': 'right'})
circ3.plug('bend0', {'output': 'left'})
circ3.plug('bend0', {'output': 'left'})
circ3.plug('bend0', {'output': 'left'})
circ3.plug('tri_wg10', {'input': 'right'})
circ3.plug('tri_wg28', {'input': 'right'})
circ3.plug('tri_wg10', {'input': 'right', 'output': 'left'})
lib.set_const('loop_segment', circ3.pattern)
# #
# Write all devices into a GDS file # Write all devices into a GDS file
# #
print('Writing library to file...')
# This line could be slow, since it generates or loads many of the devices writefile(lib, 'library.gds', **GDS_OPTS)
# since they were not all accessed above.
all_device_pats = [dev.pattern for dev in device_lib.values()]
writefile(all_device_pats, 'library.gds', **GDS_OPTS)
if __name__ == '__main__': if __name__ == '__main__':
@ -116,15 +113,15 @@ if __name__ == '__main__':
#class prout: #class prout:
# def place( # def place(
# self, # self,
# other: Device, # other: Pattern,
# label_layer: layer_t = 'WATLAYER', # label_layer: layer_t = 'WATLAYER',
# *, # *,
# port_map: Optional[Dict[str, Optional[str]]] = None, # port_map: Dict[str, str | None] | None = None,
# **kwargs, # **kwargs,
# ) -> 'prout': # ) -> 'prout':
# #
# Device.place(self, other, port_map=port_map, **kwargs) # Pattern.place(self, other, port_map=port_map, **kwargs)
# name: Optional[str] # name: str | None
# for name in other.ports: # for name in other.ports:
# if port_map: # if port_map:
# assert(name is not None) # assert(name is not None)


@ -2,7 +2,7 @@
Routines for creating normalized 2D lattices and common photonic crystal Routines for creating normalized 2D lattices and common photonic crystal
cavity designs. cavity designs.
""" """
from typing import Sequence, Tuple from typing import Sequence
import numpy import numpy
from numpy.typing import ArrayLike, NDArray from numpy.typing import ArrayLike, NDArray
@ -29,8 +29,11 @@ def triangular_lattice(
Returns:
`[[x0, y0], [x1, y1], ...]` denoting lattice sites.
"""
-sx, sy = numpy.meshgrid(numpy.arange(dims[0], dtype=float),
-                        numpy.arange(dims[1], dtype=float), indexing='ij')
+sx, sy = numpy.meshgrid(
+    numpy.arange(dims[0], dtype=float),
+    numpy.arange(dims[1], dtype=float),
+    indexing='ij',
+    )
sx[sy % 2 == 1] += 0.5
sy *= numpy.sqrt(3) / 2
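As a sanity check, here is a standalone numpy sketch of the row-offset logic in the hunk above; `dims` and the final stacking into an `(N, 2)` array are assumptions based on the docstring, not part of the diff.

```
import numpy

dims = (4, 3)   # assumed: (number of x sites, number of y rows)
sx, sy = numpy.meshgrid(
    numpy.arange(dims[0], dtype=float),
    numpy.arange(dims[1], dtype=float),
    indexing='ij',
)
sx[sy % 2 == 1] += 0.5            # shift every other row by half a lattice constant
sy *= numpy.sqrt(3) / 2           # row pitch for an equilateral triangular lattice
xy = numpy.column_stack((sx.ravel(), sy.ravel()))   # [[x0, y0], [x1, y1], ...]
print(xy[:4])
```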

View File

@ -3,14 +3,14 @@
masque is an attempt to make a relatively small library for designing lithography
masks. The general idea is to implement something resembling the GDSII and OASIS file-formats,
-but with some additional vectorized element types (eg. ellipses, not just polygons), better
-support for E-beam doses, and the ability to interface with multiple file formats.
+but with some additional vectorized element types (eg. ellipses, not just polygons), and the
+ability to interface with multiple file formats.
`Pattern` is a basic object containing a 2D lithography mask, composed of a list of `Shape`
objects, a list of `Label` objects, and a list of references to other `Patterns` (using
-`SubPattern`).
-`SubPattern` provides basic support for nesting `Pattern` objects within each other, by adding
+`Ref`).
+`Ref` provides basic support for nesting `Pattern` objects within each other, by adding
offset, rotation, scaling, repetition, and other such properties to a Pattern reference.
Note that the methods for these classes try to avoid copying wherever possible, so unless
@ -24,17 +24,25 @@
metaclass is used to auto-generate slots based on superclass type annotations.
- File I/O submodules are imported by `masque.file` to avoid creating hard dependencies on
external file-format reader/writers
-- Pattern locking/unlocking is quite slow for large hierarchies.
"""
-from .error import PatternError, PatternLockedError
-from .shapes import Shape
-from .label import Label
-from .subpattern import SubPattern
-from .pattern import Pattern
-from .utils import layer_t, annotations_t
-from .library import Library, DeviceLibrary
+from .utils import layer_t, annotations_t, SupportsBool
+from .error import MasqueError, PatternError, LibraryError, BuildError
+from .shapes import Shape, Polygon, Path, Circle, Arc, Ellipse
+from .label import Label
+from .ref import Ref
+from .pattern import Pattern
+from .library import (
+    ILibraryView, ILibrary,
+    LibraryView, Library, LazyLibrary,
+    AbstractView,
+    )
+from .ports import Port, PortList
+from .abstract import Abstract
+from .builder import Builder, Tool, Pather, RenderPather, render_step_t
+from .utils import ports2data, oneshot
__author__ = 'Jan Petykiewicz'
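The renamed top-level objects can be combined as in this small sketch (the empty `Library()` constructor is an assumption; the `Ref` usage mirrors `masque/builder/builder.py` further down).

```
from masque import Library, Pattern, Ref

lib = Library()                 # assumed: empty constructor
lib['unit_cell'] = Pattern()    # a (here empty) cell

top = Pattern()
ref = Ref('unit_cell')          # reference the cell by name
ref.translate((10, 0))          # offset the instance
top.refs.append(ref)            # same refs-list usage as Builder.place() below
lib['top'] = top
```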

masque/abstract.py Normal file (230 lines)
View File

@ -0,0 +1,230 @@
from typing import Self
import copy
import logging
import numpy
from numpy.typing import ArrayLike
from .ref import Ref
from .ports import PortList, Port
from .utils import rotation_matrix_2d, normalize_mirror
#if TYPE_CHECKING:
# from .builder import Builder, Tool
# from .library import ILibrary
logger = logging.getLogger(__name__)
class Abstract(PortList):
__slots__ = ('name', '_ports')
name: str
""" Name of the pattern this device references """
_ports: dict[str, Port]
""" Uniquely-named ports which can be used to instances together"""
@property
def ports(self) -> dict[str, Port]:
return self._ports
@ports.setter
def ports(self, value: dict[str, Port]) -> None:
self._ports = value
def __init__(
self,
name: str,
ports: dict[str, Port],
) -> None:
self.name = name
self.ports = copy.deepcopy(ports)
# def build(
# self,
# library: 'ILibrary',
# tools: 'None | Tool | MutableMapping[str | None, Tool]' = None,
# ) -> 'Builder':
# """
# Begin building a new device around an instance of the current device
# (rather than modifying the current device).
#
# Returns:
# The new `Builder` object.
# """
# pat = Pattern(ports=self.ports)
# pat.ref(self.name)
# new = Builder(library=library, pattern=pat, tools=tools) # TODO should Abstract have tools?
# return new
# TODO do we want to store a Ref instead of just a name? then we can translate/rotate/mirror...
def __repr__(self) -> str:
s = f'<Abstract {self.name} ['
for name, port in self.ports.items():
s += f'\n\t{name}: {port}'
s += ']>'
return s
def translate_ports(self, offset: ArrayLike) -> Self:
"""
Translates all ports by the given offset.
Args:
offset: (x, y) to translate by
Returns:
self
"""
for port in self.ports.values():
port.translate(offset)
return self
def scale_by(self, c: float) -> Self:
"""
Scale this Abstract by the given value
(all port offsets are scaled)
Args:
c: factor to scale by
Returns:
self
"""
for port in self.ports.values():
port.offset *= c
return self
def rotate_around(self, pivot: ArrayLike, rotation: float) -> Self:
"""
Rotate the Abstract around a given location.
Args:
pivot: (x, y) location to rotate around
rotation: Angle to rotate by (counter-clockwise, radians)
Returns:
self
"""
pivot = numpy.array(pivot)
self.translate_ports(-pivot)
self.rotate_ports(rotation)
self.rotate_port_offsets(rotation)
self.translate_ports(+pivot)
return self
def rotate_port_offsets(self, rotation: float) -> Self:
"""
Rotate the offsets of all ports around (0, 0)
Args:
rotation: Angle to rotate by (counter-clockwise, radians)
Returns:
self
"""
for port in self.ports.values():
port.offset = rotation_matrix_2d(rotation) @ port.offset
return self
def rotate_ports(self, rotation: float) -> Self:
"""
Rotate each port around its offset (i.e. in place)
Args:
rotation: Angle to rotate by (counter-clockwise, radians)
Returns:
self
"""
for port in self.ports.values():
port.rotate(rotation)
return self
def mirror_port_offsets(self, across_axis: int) -> Self:
"""
Mirror the offsets of all ports across an axis
Args:
across_axis: Axis to mirror across
(0: mirror across x axis, 1: mirror across y axis)
Returns:
self
"""
for port in self.ports.values():
port.offset[across_axis - 1] *= -1
return self
def mirror_ports(self, across_axis: int) -> Self:
"""
Mirror each port's rotation across an axis, relative to its
offset
Args:
across_axis: Axis to mirror across
(0: mirror across x axis, 1: mirror across y axis)
Returns:
self
"""
for port in self.ports.values():
port.mirror(across_axis)
return self
def mirror(self, across_axis: int) -> Self:
"""
Mirror the Abstract (all of its ports) across an axis
Args:
across_axis: Axis to mirror across
(0: mirror across x axis, 1: mirror across y axis)
Returns:
self
"""
self.mirror_ports(across_axis)
self.mirror_port_offsets(across_axis)
return self
def apply_ref_transform(self, ref: Ref) -> Self:
"""
Apply the transform from a `Ref` to the ports of this `Abstract`.
This changes the port locations to where they would be in the Ref's parent pattern.
Args:
ref: The ref whose transform should be applied.
Returns:
self
"""
mirrored_across_x, angle = normalize_mirror(ref.mirrored)
if mirrored_across_x:
self.mirror(across_axis=0)
self.rotate_ports(angle + ref.rotation)
self.rotate_port_offsets(angle + ref.rotation)
self.translate_ports(ref.offset)
return self
def undo_ref_transform(self, ref: Ref) -> Self:
"""
Apply the inverse transform from a `Ref` to the ports of this `Abstract`.
This changes the port locations to where they would be in the Ref's target (from the parent).
Args:
ref: The ref whose (inverse) transform should be applied.
Returns:
self
# TODO test undo_ref_transform
"""
mirrored_across_x, angle = normalize_mirror(ref.mirrored)
self.translate_ports(-ref.offset)
self.rotate_port_offsets(-angle - ref.rotation)
self.rotate_ports(-angle - ref.rotation)
if mirrored_across_x:
self.mirror(across_axis=0)
return self
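A short sketch of how `apply_ref_transform` maps an `Abstract`'s ports into a parent pattern's frame. The `Port` constructor arguments follow the old `devices.py` shown near the end of this diff, and the `Ref` defaults are assumed; the new `ports.py` is not part of this excerpt.

```
from numpy import pi
from masque import Port, Ref, Abstract

# Abstract view of a cell named 'wg' with a single port 5 units from its origin.
abs_wg = Abstract('wg', ports={'A': Port((5, 0), rotation=pi)})

# A reference to 'wg', rotated 90 degrees about the origin and then shifted.
ref = Ref('wg')
ref.rotate_around((0, 0), pi / 2)
ref.translate((100, 0))

abs_wg.apply_ref_transform(ref)
print(abs_wg.ports['A'].offset)   # now expressed in the parent's frame, roughly (100, 5)
```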

View File

@ -1,2 +1,5 @@
-from .devices import Port, Device
+from .builder import Builder
+from .pather import Pather
+from .renderpather import RenderPather
from .utils import ell
+from .tools import Tool, render_step_t

masque/builder/builder.py Normal file (557 lines)
View File

@ -0,0 +1,557 @@
from typing import Self, Sequence, Mapping, Literal, overload, Final, cast
import copy
import logging
from numpy import pi
from numpy.typing import ArrayLike
from ..pattern import Pattern
from ..ref import Ref
from ..library import ILibrary
from ..error import PortError, BuildError
from ..ports import PortList, Port
from ..abstract import Abstract
logger = logging.getLogger(__name__)
class Builder(PortList):
"""
TODO DOCUMENT Builder
A `Device` is a combination of a `Pattern` with a set of named `Port`s
which can be used to "snap" devices together to make complex layouts.
`Device`s can be as simple as one or two ports (e.g. an electrical pad
or wire), but can also be used to build and represent a large routed
layout (e.g. a logical block with multiple I/O connections or even a
full chip).
For convenience, ports can be read out using square brackets:
- `device['A'] == Port((0, 0), 0)`
- `device[['A', 'B']] == {'A': Port((0, 0), 0), 'B': Port((0, 0), pi)}`
Examples: Creating a Device
===========================
- `Device(pattern, ports={'A': port_a, 'C': port_c})` uses an existing
pattern and defines some ports.
- `Device(ports=None)` makes a new empty pattern with
default ports ('A' and 'B', in opposite directions, at (0, 0)).
- `my_device.build('my_layout')` makes a new pattern and instantiates
`my_device` in it with offset (0, 0) as a base for further building.
- `my_device.as_interface('my_component', port_map=['A', 'B'])` makes a new
(empty) pattern, copies over ports 'A' and 'B' from `my_device`, and
creates additional ports 'in_A' and 'in_B' facing in the opposite
directions. This can be used to build a device which can plug into
`my_device` (using the 'in_*' ports) but which does not itself include
`my_device` as a subcomponent.
Examples: Adding to a Device
============================
- `my_device.plug(subdevice, {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `subdevice` into `my_device`, plugging ports 'A' and 'B'
of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports
are removed and any unconnected ports from `subdevice` are added to
`my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'.
- `my_device.plug(wire, {'myport': 'A'})` places port 'A' of `wire` at 'myport'
of `my_device`. If `wire` has only two ports (e.g. 'A' and 'B'), no `map_out`
argument is provided, and the `inherit_name` argument is not explicitly
set to `False`, the unconnected port of `wire` is automatically renamed to
'myport'. This allows easy extension of existing ports without changing
their names or having to provide `map_out` each time `plug` is called.
- `my_device.place(pad, offset=(10, 10), rotation=pi / 2, port_map={'A': 'gnd'})`
instantiates `pad` at the specified (x, y) offset and with the specified
rotation, adding its ports to those of `my_device`. Port 'A' of `pad` is
renamed to 'gnd' so that further routing can use this signal or net name
rather than the port name on the original `pad` device.
"""
__slots__ = ('pattern', 'library', '_dead')
pattern: Pattern
""" Layout of this device """
library: ILibrary | None
"""
Library from which existing patterns should be referenced, and to which
new ones should be added
"""
_dead: bool
""" If True, plug()/place() are skipped (for debugging)"""
@property
def ports(self) -> dict[str, Port]:
return self.pattern.ports
@ports.setter
def ports(self, value: dict[str, Port]) -> None:
self.pattern.ports = value
def __init__(
self,
library: ILibrary | None = None,
*,
pattern: Pattern | None = None,
ports: str | Mapping[str, Port] | None = None,
name: str | None = None,
) -> None:
"""
# TODO documentation for Builder() constructor
"""
self._dead = False
self.library = library
if pattern is not None:
self.pattern = pattern
else:
self.pattern = Pattern()
if ports is not None:
if self.pattern.ports:
raise BuildError('Ports supplied for pattern with pre-existing ports!')
if isinstance(ports, str):
if library is None:
raise BuildError('Ports given as a string, but `library` was `None`!')
ports = library.abstract(ports).ports
self.pattern.ports.update(copy.deepcopy(dict(ports)))
if name is not None:
if library is None:
raise BuildError('Name was supplied, but no library was given!')
library[name] = self.pattern
@classmethod
def interface(
cls,
source: PortList | Mapping[str, Port] | str,
*,
library: ILibrary | None = None,
in_prefix: str = 'in_',
out_prefix: str = '',
port_map: dict[str, str] | Sequence[str] | None = None,
name: str | None = None,
) -> 'Builder':
"""
Begin building a new device based on all or some of the ports in the
source device. Do not include the source device; instead use it
to define ports (the "interface") for the new device.
The ports specified by `port_map` (default: all ports) are copied to
new device, and additional (input) ports are created facing in the
opposite directions. The specified `in_prefix` and `out_prefix` are
prepended to the port names to differentiate them.
By default, the flipped ports are given an 'in_' prefix and unflipped
ports keep their original names, enabling intuitive construction of
a device that will "plug into" the current device; the 'in_*' ports
are used for plugging the devices together while the original port
names are used for building the new device.
Another use-case could be to build the new device using the 'in_'
ports, creating a new device which could be used in place of the
current device.
Args:
source: A collection of ports (e.g. Pattern, Builder, or dict)
from which to create the interface.
library: Library from which existing patterns should be referenced, TODO
and to which new ones should be added. If not provided,
the source's library will be used (if available).
in_prefix: Prepended to port names for newly-created ports with
reversed directions compared to the current device.
out_prefix: Prepended to port names for ports which are directly
copied from the current device.
port_map: Specification for ports to copy into the new device:
- If `None`, all ports are copied.
- If a sequence, only the listed ports are copied
- If a mapping, the listed ports (keys) are copied and
renamed (to the values).
Returns:
The new builder, with an empty pattern and 2x as many ports as
listed in port_map.
Raises:
`PortError` if `port_map` contains port names not present in the
current device.
`PortError` if applying the prefixes results in duplicate port
names.
"""
if library is None:
if hasattr(source, 'library') and isinstance(source.library, ILibrary):
library = source.library
if isinstance(source, str):
if library is None:
raise BuildError('Source given as a string, but `library` was `None`!')
orig_ports = library.abstract(source).ports
elif isinstance(source, PortList):
orig_ports = source.ports
elif isinstance(source, dict):
orig_ports = source
else:
raise BuildError(f'Unable to get ports from {type(source)}: {source}')
if port_map:
if isinstance(port_map, dict):
missing_inkeys = set(port_map.keys()) - set(orig_ports.keys())
mapped_ports = {port_map[k]: v for k, v in orig_ports.items() if k in port_map}
else:
port_set = set(port_map)
missing_inkeys = port_set - set(orig_ports.keys())
mapped_ports = {k: v for k, v in orig_ports.items() if k in port_set}
if missing_inkeys:
raise PortError(f'`port_map` keys not present in source: {missing_inkeys}')
else:
mapped_ports = orig_ports
ports_in = {f'{in_prefix}{name}': port.deepcopy().rotate(pi)
for name, port in mapped_ports.items()}
ports_out = {f'{out_prefix}{name}': port.deepcopy()
for name, port in mapped_ports.items()}
duplicates = set(ports_out.keys()) & set(ports_in.keys())
if duplicates:
raise PortError(f'Duplicate keys after prefixing, try a different prefix: {duplicates}')
new = Builder(library=library, ports={**ports_in, **ports_out}, name=name)
return new
# @overload
# def plug(
# self,
# other: Abstract | str,
# map_in: dict[str, str],
# map_out: dict[str, str | None] | None,
# *,
# mirrored: tuple[bool, bool],
# inherit_name: bool,
# set_rotation: bool | None,
# append: bool,
# ) -> Self:
# pass
#
# @overload
# def plug(
# self,
# other: Pattern,
# map_in: dict[str, str],
# map_out: dict[str, str | None] | None = None,
# *,
# mirrored: tuple[bool, bool] = (False, False),
# inherit_name: bool = True,
# set_rotation: bool | None = None,
# append: bool = False,
# ) -> Self:
# pass
def plug(
self,
other: Abstract | str | Pattern,
map_in: dict[str, str],
map_out: dict[str, str | None] | None = None,
*,
mirrored: tuple[bool, bool] = (False, False),
inherit_name: bool = True,
set_rotation: bool | None = None,
append: bool = False,
) -> Self:
"""
Instantiate or append a pattern into the current device, connecting
the ports specified by `map_in` and renaming the unconnected
ports specified by `map_out`.
Examples:
=========
- `my_device.plug(lib, 'subdevice', {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `lib['subdevice']` into `my_device`, plugging ports 'A' and 'B'
of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports
are removed and any unconnected ports from `subdevice` are added to
`my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'.
- `my_device.plug(lib, 'wire', {'myport': 'A'})` places port 'A' of `lib['wire']`
at 'myport' of `my_device`.
If `'wire'` has only two ports (e.g. 'A' and 'B'), no `map_out` argument is
provided, and the `inherit_name` argument is not explicitly set to `False`,
the unconnected port of `wire` is automatically renamed to 'myport'. This
allows easy extension of existing ports without changing their names or
having to provide `map_out` each time `plug` is called.
Args:
other: An `Abstract` describing the device to be instantiated.
map_in: dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
map_out: dict of `{'old_name': 'new_name'}` mappings, specifying
new names for ports in `other`.
mirrored: Enables mirroring `other` across the x or y axes prior
to connecting any ports.
inherit_name: If `True`, and `map_in` specifies only a single port,
and `map_out` is `None`, and `other` has only two ports total,
then automatically renames the output port of `other` to the
name of the port from `self` that appears in `map_in`. This
makes it easy to extend a device with simple 2-port devices
(e.g. wires) without providing `map_out` each time `plug` is
called. See "Examples" above for more info. Default `True`.
set_rotation: If the necessary rotation cannot be determined from
the ports being connected (i.e. all pairs have at least one
port with `rotation=None`), `set_rotation` must be provided
to indicate how much `other` should be rotated. Otherwise,
`set_rotation` must remain `None`.
Returns:
self
Raises:
`PortError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other_names`.
`PortError` if there are any duplicate names after `map_in` and `map_out`
are applied.
`PortError` if the specified port mapping is not achievable (the ports
do not line up)
"""
if self._dead:
logger.error('Skipping plug() since device is dead')
return self
if isinstance(other, str):
if self.library is None:
raise BuildError('No library available, but `other` was a string!')
other = self.library.abstract(other)
# If asked to inherit a name, check that all conditions are met
if (inherit_name
and not map_out
and len(map_in) == 1
and len(other.ports) == 2):
out_port_name = next(iter(set(other.ports.keys()) - set(map_in.values())))
map_out = {out_port_name: next(iter(map_in.keys()))}
if map_out is None:
map_out = {}
map_out = copy.deepcopy(map_out)
self.check_ports(other.ports.keys(), map_in, map_out)
translation, rotation, pivot = self.find_transform(
other,
map_in,
mirrored=mirrored,
set_rotation=set_rotation,
)
# get rid of plugged ports
for ki, vi in map_in.items():
del self.ports[ki]
map_out[vi] = None
if isinstance(other, Pattern):
assert append
self.place(other, offset=translation, rotation=rotation, pivot=pivot,
mirrored=mirrored, port_map=map_out, skip_port_check=True, append=append)
else:
self.place(other, offset=translation, rotation=rotation, pivot=pivot,
mirrored=mirrored, port_map=map_out, skip_port_check=True, append=append)
return self
@overload
def place(
self,
other: Abstract | str,
*,
offset: ArrayLike,
rotation: float,
pivot: ArrayLike,
mirrored: tuple[bool, bool],
port_map: dict[str, str | None] | None,
skip_port_check: bool,
append: bool,
) -> Self:
pass
@overload
def place(
self,
other: Pattern,
*,
offset: ArrayLike,
rotation: float,
pivot: ArrayLike,
mirrored: tuple[bool, bool],
port_map: dict[str, str | None] | None,
skip_port_check: bool,
append: Literal[True],
) -> Self:
pass
def place(
self,
other: Abstract | str | Pattern,
*,
offset: ArrayLike = (0, 0),
rotation: float = 0,
pivot: ArrayLike = (0, 0),
mirrored: tuple[bool, bool] = (False, False),
port_map: dict[str, str | None] | None = None,
skip_port_check: bool = False,
append: bool = False,
) -> Self:
"""
Instantiate or append the device `other` into the current device, adding its
ports to those of the current device (but not connecting any ports).
Mirroring is applied before rotation; translation (`offset`) is applied last.
Examples:
=========
- `my_device.place(pad, offset=(10, 10), rotation=pi / 2, port_map={'A': 'gnd'})`
instantiates `pad` at the specified (x, y) offset and with the specified
rotation, adding its ports to those of `my_device`. Port 'A' of `pad` is
renamed to 'gnd' so that further routing can use this signal or net name
rather than the port name on the original `pad` device.
Args:
other: An `Abstract` describing the device to be instantiated.
offset: Offset at which to place the instance. Default (0, 0).
rotation: Rotation applied to the instance before placement. Default 0.
pivot: Rotation is applied around this pivot point (default (0, 0)).
Rotation is applied prior to translation (`offset`).
mirrored: Whether the instance should be mirrored across the x and y axes.
Mirroring is applied before translation and rotation.
port_map: dict of `{'old_name': 'new_name'}` mappings, specifying
new names for ports in the instantiated device. New names can be
`None`, which will delete those ports.
skip_port_check: Can be used to skip the internal call to `check_ports`,
in case it has already been performed elsewhere.
Returns:
self
Raises:
`PortError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other.ports`.
`PortError` if there are any duplicate names after `map_in` and `map_out`
are applied.
"""
if self._dead:
logger.error('Skipping place() since device is dead')
return self
if isinstance(other, str):
if self.library is None:
raise BuildError('No library available, but `other` was a string!')
other = self.library.abstract(other)
if port_map is None:
port_map = {}
if not skip_port_check:
self.check_ports(other.ports.keys(), map_in=None, map_out=port_map)
ports = {}
for name, port in other.ports.items():
new_name = port_map.get(name, name)
if new_name is None:
continue
ports[new_name] = port
for name, port in ports.items():
p = port.deepcopy()
p.mirror2d(mirrored)
p.rotate_around(pivot, rotation)
p.translate(offset)
self.ports[name] = p
if append:
if isinstance(other, Pattern):
other_pat = other
elif isinstance(other, Abstract):
assert self.library is not None
other_pat = self.library[other.name]
else:
other_pat = self.library[name]
other_copy = other_pat.deepcopy()
other_copy.ports.clear()
other_copy.mirror2d(mirrored)
other_copy.rotate_around(pivot, rotation)
other_copy.translate_elements(offset)
self.pattern.append(other_copy)
else:
assert not isinstance(other, Pattern)
ref = Ref(other.name, mirrored=mirrored)
ref.rotate_around(pivot, rotation)
ref.translate(offset)
self.pattern.refs.append(ref)
return self
def translate(self, offset: ArrayLike) -> Self:
"""
Translate the pattern and all ports.
Args:
offset: (x, y) distance to translate by
Returns:
self
"""
self.pattern.translate_elements(offset)
return self
def rotate_around(self, pivot: ArrayLike, angle: float) -> Self:
"""
Rotate the pattern and all ports.
Args:
angle: angle (radians, counterclockwise) to rotate by
pivot: location to rotate around
Returns:
self
"""
self.pattern.rotate_around(pivot, angle)
for port in self.ports.values():
port.rotate_around(pivot, angle)
return self
def mirror(self, axis: int) -> Self:
"""
Mirror the pattern and all ports across the specified axis.
Args:
axis: Axis to mirror across (x=0, y=1)
Returns:
self
"""
self.pattern.mirror(axis)
for p in self.ports.values():
p.mirror(axis)
return self
def set_dead(self) -> Self:
"""
Disallows further changes through `plug()` or `place()`.
This is meant for debugging:
```
dev.plug(a, ...)
dev.set_dead() # added for debug purposes
dev.plug(b, ...) # usually raises an error, but now skipped
dev.plug(c, ...) # also skipped
dev.pattern.visualize() # shows the device as of the set_dead() call
```
Returns:
self
"""
self._dead = True
return self
def __repr__(self) -> str:
s = f'<Builder {self.pattern} >' # TODO maybe show lib and tools? in builder repr?
return s
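A hypothetical end-to-end sketch of the `Builder` workflow defined above. Cell and port names are invented, and it assumes `Library()` can be constructed empty and that `Pattern.ports` can be assigned directly; neither is shown in this diff.

```
from numpy import pi
from masque import Library, Pattern, Port, Builder

lib = Library()                               # assumed empty constructor

# A 10-unit 'wire' cell with two opposite-facing ports.
wire = Pattern()
wire.ports = {
    'A': Port((0, 0), rotation=0),
    'B': Port((10, 0), rotation=pi),
}
lib['wire10'] = wire

# Start a new cell whose ports are copied from 'wire10', then chain one instance onto it.
top = Builder(library=lib, ports='wire10', name='top')
top.plug('wire10', {'B': 'A'})     # port 'B' of `top` mates with port 'A' of a wire10 instance
print(top.pattern.ports['B'])      # the surviving port inherits the name 'B' and sits at (20, 0)
```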

View File

@ -1,764 +0,0 @@
from typing import Dict, Iterable, List, Tuple, Union, TypeVar, Any, Iterator, Optional, Sequence
from typing import overload, KeysView, ValuesView
import copy
import warnings
import traceback
import logging
from collections import Counter
import numpy
from numpy import pi
from numpy.typing import ArrayLike, NDArray
from ..pattern import Pattern
from ..subpattern import SubPattern
from ..traits import PositionableImpl, Rotatable, PivotableImpl, Copyable, Mirrorable
from ..utils import AutoSlots, rotation_matrix_2d
from ..error import DeviceError
logger = logging.getLogger(__name__)
P = TypeVar('P', bound='Port')
D = TypeVar('D', bound='Device')
O = TypeVar('O', bound='Device')
class Port(PositionableImpl, Rotatable, PivotableImpl, Copyable, Mirrorable, metaclass=AutoSlots):
"""
A point at which a `Device` can be snapped to another `Device`.
Each port has an `offset` ((x, y) position) and may also have a
`rotation` (orientation) and a `ptype` (port type).
The `rotation` is an angle, in radians, measured counterclockwise
from the +x axis, pointing inwards into the device which owns the port.
The rotation may be set to `None`, indicating that any orientation is
allowed (e.g. for a DC electrical port). It is stored modulo 2pi.
The `ptype` is an arbitrary string, default of `unk` (unknown).
"""
__slots__ = ('ptype', '_rotation')
_rotation: Optional[float]
""" radians counterclockwise from +x, pointing into device body.
Can be `None` to signify undirected port """
ptype: str
""" Port types must match to be plugged together if both are non-zero """
def __init__(
self,
offset: ArrayLike,
rotation: Optional[float],
ptype: str = 'unk',
) -> None:
self.offset = offset
self.rotation = rotation
self.ptype = ptype
@property
def rotation(self) -> Optional[float]:
""" Rotation, radians counterclockwise, pointing into device body. Can be None. """
return self._rotation
@rotation.setter
def rotation(self, val: float) -> None:
if val is None:
self._rotation = None
else:
if not numpy.size(val) == 1:
raise DeviceError('Rotation must be a scalar')
self._rotation = val % (2 * pi)
def get_bounds(self):
return numpy.vstack((self.offset, self.offset))
def set_ptype(self: P, ptype: str) -> P:
""" Chainable setter for `ptype` """
self.ptype = ptype
return self
def mirror(self: P, axis: int) -> P:
self.offset[1 - axis] *= -1
if self.rotation is not None:
self.rotation *= -1
self.rotation += axis * pi
return self
def rotate(self: P, rotation: float) -> P:
if self.rotation is not None:
self.rotation += rotation
return self
def set_rotation(self: P, rotation: Optional[float]) -> P:
self.rotation = rotation
return self
def __repr__(self) -> str:
if self.rotation is None:
rot = 'any'
else:
rot = str(numpy.rad2deg(self.rotation))
return f'<{self.offset}, {rot}, [{self.ptype}]>'
class Device(Copyable, Mirrorable):
"""
A `Device` is a combination of a `Pattern` with a set of named `Port`s
which can be used to "snap" devices together to make complex layouts.
`Device`s can be as simple as one or two ports (e.g. an electrical pad
or wire), but can also be used to build and represent a large routed
layout (e.g. a logical block with multiple I/O connections or even a
full chip).
For convenience, ports can be read out using square brackets:
- `device['A'] == Port((0, 0), 0)`
- `device[['A', 'B']] == {'A': Port((0, 0), 0), 'B': Port((0, 0), pi)}`
Examples: Creating a Device
===========================
- `Device(pattern, ports={'A': port_a, 'C': port_c})` uses an existing
pattern and defines some ports.
- `Device(name='my_dev_name', ports=None)` makes a new empty pattern with
default ports ('A' and 'B', in opposite directions, at (0, 0)).
- `my_device.build('my_layout')` makes a new pattern and instantiates
`my_device` in it with offset (0, 0) as a base for further building.
- `my_device.as_interface('my_component', port_map=['A', 'B'])` makes a new
(empty) pattern, copies over ports 'A' and 'B' from `my_device`, and
creates additional ports 'in_A' and 'in_B' facing in the opposite
directions. This can be used to build a device which can plug into
`my_device` (using the 'in_*' ports) but which does not itself include
`my_device` as a subcomponent.
Examples: Adding to a Device
============================
- `my_device.plug(subdevice, {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `subdevice` into `my_device`, plugging ports 'A' and 'B'
of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports
are removed and any unconnected ports from `subdevice` are added to
`my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'.
- `my_device.plug(wire, {'myport': 'A'})` places port 'A' of `wire` at 'myport'
of `my_device`. If `wire` has only two ports (e.g. 'A' and 'B'), no `map_out`
argument is provided, and the `inherit_name` argument is not explicitly
set to `False`, the unconnected port of `wire` is automatically renamed to
'myport'. This allows easy extension of existing ports without changing
their names or having to provide `map_out` each time `plug` is called.
- `my_device.place(pad, offset=(10, 10), rotation=pi / 2, port_map={'A': 'gnd'})`
instantiates `pad` at the specified (x, y) offset and with the specified
rotation, adding its ports to those of `my_device`. Port 'A' of `pad` is
renamed to 'gnd' so that further routing can use this signal or net name
rather than the port name on the original `pad` device.
"""
__slots__ = ('pattern', 'ports', '_dead')
pattern: Pattern
""" Layout of this device """
ports: Dict[str, Port]
""" Uniquely-named ports which can be used to snap to other Device instances"""
_dead: bool
""" If True, plug()/place() are skipped (for debugging)"""
def __init__(
self,
pattern: Optional[Pattern] = None,
ports: Optional[Dict[str, Port]] = None,
*,
name: Optional[str] = None,
) -> None:
"""
If `ports` is `None`, two default ports ('A' and 'B') are created.
Both are placed at (0, 0) and have default `ptype`, but 'A' has rotation 0
(attached devices will be placed to the left) and 'B' has rotation
pi (attached devices will be placed to the right).
"""
if pattern is not None:
if name is not None:
raise DeviceError('Only one of `pattern` and `name` may be specified')
self.pattern = pattern
else:
if name is None:
raise DeviceError('Must specify either `pattern` or `name`')
self.pattern = Pattern(name=name)
if ports is None:
self.ports = {
'A': Port([0, 0], rotation=0),
'B': Port([0, 0], rotation=pi),
}
else:
self.ports = copy.deepcopy(ports)
self._dead = False
@overload
def __getitem__(self, key: str) -> Port:
pass
@overload
def __getitem__(self, key: Union[List[str], Tuple[str], KeysView[str], ValuesView[str]]) -> Dict[str, Port]:
pass
def __getitem__(self, key: Union[str, Iterable[str]]) -> Union[Port, Dict[str, Port]]:
"""
For convenience, ports can be read out using square brackets:
- `device['A'] == Port((0, 0), 0)`
- `device[['A', 'B']] == {'A': Port((0, 0), 0),
'B': Port((0, 0), pi)}`
"""
if isinstance(key, str):
return self.ports[key]
else:
return {k: self.ports[k] for k in key}
def rename_ports(
self: D,
mapping: Dict[str, Optional[str]],
overwrite: bool = False,
) -> D:
"""
Renames ports as specified by `mapping`.
Ports can be explicitly deleted by mapping them to `None`.
Args:
mapping: Dict of `{'old_name': 'new_name'}` pairs. Names can be mapped
to `None` to perform an explicit deletion. `'new_name'` can also
overwrite an existing non-renamed port to implicitly delete it if
`overwrite` is set to `True`.
overwrite: Allows implicit deletion of ports if set to `True`; see `mapping`.
Returns:
self
"""
if not overwrite:
duplicates = (set(self.ports.keys()) - set(mapping.keys())) & set(mapping.values())
if duplicates:
raise DeviceError(f'Unrenamed ports would be overwritten: {duplicates}')
renamed = {mapping[k]: self.ports.pop(k) for k in mapping.keys()}
if None in renamed:
del renamed[None]
self.ports.update(renamed) # type: ignore
return self
def check_ports(
self: D,
other_names: Iterable[str],
map_in: Optional[Dict[str, str]] = None,
map_out: Optional[Dict[str, Optional[str]]] = None,
) -> D:
"""
Given the provided port mappings, check that:
- All of the ports specified in the mappings exist
- There are no duplicate port names after all the mappings are performed
Args:
other_names: List of port names being considered for inclusion into
`self.ports` (before mapping)
map_in: Dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
map_out: Dict of `{'old_name': 'new_name'}` mappings, specifying
new names for unconnected `other_names` ports.
Returns:
self
Raises:
`DeviceError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other_names`.
`DeviceError` if there are any duplicate names after `map_in` and `map_out`
are applied.
"""
if map_in is None:
map_in = {}
if map_out is None:
map_out = {}
other = set(other_names)
missing_inkeys = set(map_in.keys()) - set(self.ports.keys())
if missing_inkeys:
raise DeviceError(f'`map_in` keys not present in device: {missing_inkeys}')
missing_invals = set(map_in.values()) - other
if missing_invals:
raise DeviceError(f'`map_in` values not present in other device: {missing_invals}')
missing_outkeys = set(map_out.keys()) - other
if missing_outkeys:
raise DeviceError(f'`map_out` keys not present in other device: {missing_outkeys}')
orig_remaining = set(self.ports.keys()) - set(map_in.keys())
other_remaining = other - set(map_out.keys()) - set(map_in.values())
mapped_vals = set(map_out.values())
mapped_vals.discard(None)
conflicts_final = orig_remaining & (other_remaining | mapped_vals)
if conflicts_final:
raise DeviceError(f'Device ports conflict with existing ports: {conflicts_final}')
conflicts_partial = other_remaining & mapped_vals
if conflicts_partial:
raise DeviceError(f'`map_out` targets conflict with non-mapped outputs: {conflicts_partial}')
map_out_counts = Counter(map_out.values())
map_out_counts[None] = 0
conflicts_out = {k for k, v in map_out_counts.items() if v > 1}
if conflicts_out:
raise DeviceError(f'Duplicate targets in `map_out`: {conflicts_out}')
return self
def build(self, name: str) -> 'Device':
"""
Begin building a new device around an instance of the current device
(rather than modifying the current device).
Args:
name: A name for the new device
Returns:
The new `Device` object.
"""
pat = Pattern(name)
pat.addsp(self.pattern)
new = Device(pat, ports=self.ports)
return new
def as_interface(
self,
name: str,
in_prefix: str = 'in_',
out_prefix: str = '',
port_map: Optional[Union[Dict[str, str], Sequence[str]]] = None
) -> 'Device':
"""
Begin building a new device based on all or some of the ports in the
current device. Do not include the current device; instead use it
to define ports (the "interface") for the new device.
The ports specified by `port_map` (default: all ports) are copied to
new device, and additional (input) ports are created facing in the
opposite directions. The specified `in_prefix` and `out_prefix` are
prepended to the port names to differentiate them.
By default, the flipped ports are given an 'in_' prefix and unflipped
ports keep their original names, enabling intuitive construction of
a device that will "plug into" the current device; the 'in_*' ports
are used for plugging the devices together while the original port
names are used for building the new device.
Another use-case could be to build the new device using the 'in_'
ports, creating a new device which could be used in place of the
current device.
Args:
name: Name for the new device
in_prefix: Prepended to port names for newly-created ports with
reversed directions compared to the current device.
out_prefix: Prepended to port names for ports which are directly
copied from the current device.
port_map: Specification for ports to copy into the new device:
- If `None`, all ports are copied.
- If a sequence, only the listed ports are copied
- If a mapping, the listed ports (keys) are copied and
renamed (to the values).
Returns:
The new device, with an empty pattern and 2x as many ports as
listed in port_map.
Raises:
`DeviceError` if `port_map` contains port names not present in the
current device.
`DeviceError` if applying the prefixes results in duplicate port
names.
"""
if port_map:
if isinstance(port_map, dict):
missing_inkeys = set(port_map.keys()) - set(self.ports.keys())
orig_ports = {port_map[k]: v for k, v in self.ports.items() if k in port_map}
else:
port_set = set(port_map)
missing_inkeys = port_set - set(self.ports.keys())
orig_ports = {k: v for k, v in self.ports.items() if k in port_set}
if missing_inkeys:
raise DeviceError(f'`port_map` keys not present in device: {missing_inkeys}')
else:
orig_ports = self.ports
ports_in = {f'{in_prefix}{name}': port.deepcopy().rotate(pi)
for name, port in orig_ports.items()}
ports_out = {f'{out_prefix}{name}': port.deepcopy()
for name, port in orig_ports.items()}
duplicates = set(ports_out.keys()) & set(ports_in.keys())
if duplicates:
raise DeviceError(f'Duplicate keys after prefixing, try a different prefix: {duplicates}')
new = Device(name=name, ports={**ports_in, **ports_out})
return new
def plug(
self: D,
other: O,
map_in: Dict[str, str],
map_out: Optional[Dict[str, Optional[str]]] = None,
*,
mirrored: Tuple[bool, bool] = (False, False),
inherit_name: bool = True,
set_rotation: Optional[bool] = None,
) -> D:
"""
Instantiate the device `other` into the current device, connecting
the ports specified by `map_in` and renaming the unconnected
ports specified by `map_out`.
Examples:
=========
- `my_device.plug(subdevice, {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `subdevice` into `my_device`, plugging ports 'A' and 'B'
of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports
are removed and any unconnected ports from `subdevice` are added to
`my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'.
- `my_device.plug(wire, {'myport': 'A'})` places port 'A' of `wire` at 'myport'
of `my_device`. If `wire` has only two ports (e.g. 'A' and 'B'), no `map_out`
argument is provided, and the `inherit_name` argument is not explicitly
set to `False`, the unconnected port of `wire` is automatically renamed to
'myport'. This allows easy extension of existing ports without changing
their names or having to provide `map_out` each time `plug` is called.
Args:
other: A device to instantiate into the current device.
map_in: Dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
map_out: Dict of `{'old_name': 'new_name'}` mappings, specifying
new names for ports in `other`.
mirrored: Enables mirroring `other` across the x or y axes prior
to connecting any ports.
inherit_name: If `True`, and `map_in` specifies only a single port,
and `map_out` is `None`, and `other` has only two ports total,
then automatically renames the output port of `other` to the
name of the port from `self` that appears in `map_in`. This
makes it easy to extend a device with simple 2-port devices
(e.g. wires) without providing `map_out` each time `plug` is
called. See "Examples" above for more info. Default `True`.
set_rotation: If the necessary rotation cannot be determined from
the ports being connected (i.e. all pairs have at least one
port with `rotation=None`), `set_rotation` must be provided
to indicate how much `other` should be rotated. Otherwise,
`set_rotation` must remain `None`.
Returns:
self
Raises:
`DeviceError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other_names`.
`DeviceError` if there are any duplicate names after `map_in` and `map_out`
are applied.
`DeviceError` if the specified port mapping is not achievable (the ports
do not line up)
"""
if self._dead:
logger.error('Skipping plug() since device is dead')
return self
if (inherit_name
and not map_out
and len(map_in) == 1
and len(other.ports) == 2):
out_port_name = next(iter(set(other.ports.keys()) - set(map_in.values())))
map_out = {out_port_name: next(iter(map_in.keys()))}
if map_out is None:
map_out = {}
map_out = copy.deepcopy(map_out)
self.check_ports(other.ports.keys(), map_in, map_out)
translation, rotation, pivot = self.find_transform(other, map_in, mirrored=mirrored,
set_rotation=set_rotation)
# get rid of plugged ports
for ki, vi in map_in.items():
del self.ports[ki]
map_out[vi] = None
self.place(other, offset=translation, rotation=rotation, pivot=pivot,
mirrored=mirrored, port_map=map_out, skip_port_check=True)
return self
def place(
self: D,
other: O,
*,
offset: ArrayLike = (0, 0),
rotation: float = 0,
pivot: ArrayLike = (0, 0),
mirrored: Tuple[bool, bool] = (False, False),
port_map: Optional[Dict[str, Optional[str]]] = None,
skip_port_check: bool = False,
) -> D:
"""
Instantiate the device `other` into the current device, adding its
ports to those of the current device (but not connecting any ports).
Mirroring is applied before rotation; translation (`offset`) is applied last.
Examples:
=========
- `my_device.place(pad, offset=(10, 10), rotation=pi / 2, port_map={'A': 'gnd'})`
instantiates `pad` at the specified (x, y) offset and with the specified
rotation, adding its ports to those of `my_device`. Port 'A' of `pad` is
renamed to 'gnd' so that further routing can use this signal or net name
rather than the port name on the original `pad` device.
Args:
other: A device to instantiate into the current device.
offset: Offset at which to place `other`. Default (0, 0).
rotation: Rotation applied to `other` before placement. Default 0.
pivot: Rotation is applied around this pivot point (default (0, 0)).
Rotation is applied prior to translation (`offset`).
mirrored: Whether `other` should be mirrored across the x and y axes.
Mirroring is applied before translation and rotation.
port_map: Dict of `{'old_name': 'new_name'}` mappings, specifying
new names for ports in `other`. New names can be `None`, which will
delete those ports.
skip_port_check: Can be used to skip the internal call to `check_ports`,
in case it has already been performed elsewhere.
Returns:
self
Raises:
`DeviceError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other_names`.
`DeviceError` if there are any duplicate names after `map_in` and `map_out`
are applied.
"""
if self._dead:
logger.error('Skipping place() since device is dead')
return self
if port_map is None:
port_map = {}
if not skip_port_check:
self.check_ports(other.ports.keys(), map_in=None, map_out=port_map)
ports = {}
for name, port in other.ports.items():
new_name = port_map.get(name, name)
if new_name is None:
continue
ports[new_name] = port
for name, port in ports.items():
p = port.deepcopy()
p.mirror2d(mirrored)
p.rotate_around(pivot, rotation)
p.translate(offset)
self.ports[name] = p
sp = SubPattern(other.pattern, mirrored=mirrored)
sp.rotate_around(pivot, rotation)
sp.translate(offset)
self.pattern.subpatterns.append(sp)
return self
def find_transform(
self: D,
other: O,
map_in: Dict[str, str],
*,
mirrored: Tuple[bool, bool] = (False, False),
set_rotation: Optional[bool] = None,
) -> Tuple[NDArray[numpy.float64], float, NDArray[numpy.float64]]:
"""
Given a device `other` and a mapping `map_in` specifying port connections,
find the transform which will correctly align the specified ports.
Args:
other: a device
map_in: Dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
mirrored: Mirrors `other` across the x or y axes prior to
connecting any ports.
set_rotation: If the necessary rotation cannot be determined from
the ports being connected (i.e. all pairs have at least one
port with `rotation=None`), `set_rotation` must be provided
to indicate how much `other` should be rotated. Otherwise,
`set_rotation` must remain `None`.
Returns:
- The (x, y) translation (performed last)
- The rotation (radians, counterclockwise)
- The (x, y) pivot point for the rotation
The rotation should be performed before the translation.
"""
s_ports = self[map_in.keys()]
o_ports = other[map_in.values()]
s_offsets = numpy.array([p.offset for p in s_ports.values()])
o_offsets = numpy.array([p.offset for p in o_ports.values()])
s_types = [p.ptype for p in s_ports.values()]
o_types = [p.ptype for p in o_ports.values()]
s_rotations = numpy.array([p.rotation if p.rotation is not None else 0 for p in s_ports.values()])
o_rotations = numpy.array([p.rotation if p.rotation is not None else 0 for p in o_ports.values()])
s_has_rot = numpy.array([p.rotation is not None for p in s_ports.values()], dtype=bool)
o_has_rot = numpy.array([p.rotation is not None for p in o_ports.values()], dtype=bool)
has_rot = s_has_rot & o_has_rot
if mirrored[0]:
o_offsets[:, 1] *= -1
o_rotations *= -1
if mirrored[1]:
o_offsets[:, 0] *= -1
o_rotations *= -1
o_rotations += pi
type_conflicts = numpy.array([st != ot and st != 'unk' and ot != 'unk'
for st, ot in zip(s_types, o_types)])
if type_conflicts.any():
ports = numpy.where(type_conflicts)
msg = 'Ports have conflicting types:\n'
for nn, (k, v) in enumerate(map_in.items()):
if type_conflicts[nn]:
msg += f'{k} | {s_types[nn]}:{o_types[nn]} | {v}\n'
msg = ''.join(traceback.format_stack()) + '\n' + msg
warnings.warn(msg, stacklevel=2)
rotations = numpy.mod(s_rotations - o_rotations - pi, 2 * pi)
if not has_rot.any():
if set_rotation is None:
raise DeviceError('Must provide set_rotation if rotation is indeterminate')
rotations[:] = set_rotation
else:
rotations[~has_rot] = rotations[has_rot][0]
if not numpy.allclose(rotations[:1], rotations):
rot_deg = numpy.rad2deg(rotations)
msg = f'Port orientations do not match:\n'
for nn, (k, v) in enumerate(map_in.items()):
msg += f'{k} | {rot_deg[nn]:g} | {v}\n'
raise DeviceError(msg)
pivot = o_offsets[0].copy()
rotate_offsets_around(o_offsets, pivot, rotations[0])
translations = s_offsets - o_offsets
if not numpy.allclose(translations[:1], translations):
msg = f'Port translations do not match:\n'
for nn, (k, v) in enumerate(map_in.items()):
msg += f'{k} | {translations[nn]} | {v}\n'
raise DeviceError(msg)
return translations[0], rotations[0], o_offsets[0]
def translate(self: D, offset: ArrayLike) -> D:
"""
Translate the pattern and all ports.
Args:
offset: (x, y) distance to translate by
Returns:
self
"""
self.pattern.translate_elements(offset)
for port in self.ports.values():
port.translate(offset)
return self
def rotate_around(self: D, pivot: ArrayLike, angle: float) -> D:
"""
Rotate the pattern and all ports around the given pivot.
Args:
pivot: (x, y) location to rotate around
angle: angle (radians, counterclockwise) to rotate by
Returns:
self
"""
self.pattern.rotate_around(pivot, angle)
for port in self.ports.values():
port.rotate_around(pivot, angle)
return self
def mirror(self: D, axis: int) -> D:
"""
Mirror the pattern and all ports across the specified axis.
Args:
axis: Axis to mirror across (x=0, y=1)
Returns:
self
"""
self.pattern.mirror(axis)
for p in self.ports.values():
p.mirror(axis)
return self
def set_dead(self: D) -> D:
"""
Disallows further changes through `plug()` or `place()`.
This is meant for debugging:
```
dev.plug(a, ...)
dev.set_dead() # added for debug purposes
dev.plug(b, ...) # usually raises an error, but now skipped
dev.plug(c, ...) # also skipped
dev.pattern.visualize() # shows the device as of the set_dead() call
```
Returns:
self
"""
self._dead = True
return self
def rename(self: D, name: str) -> D:
"""
Renames the pattern and returns the device
Args:
name: The new name
Returns:
self
"""
self.pattern.name = name
return self
def __repr__(self) -> str:
s = f'<Device {self.pattern} ['
for name, port in self.ports.items():
s += f'\n\t{name}: {port}'
s += ']>'
return s
def rotate_offsets_around(
offsets: NDArray[numpy.float64],
pivot: NDArray[numpy.float64],
angle: float,
) -> NDArray[numpy.float64]:
offsets -= pivot
offsets[:] = (rotation_matrix_2d(angle) @ offsets.T).T
offsets += pivot
return offsets

masque/builder/pather.py Normal file (330 lines)
View File

@ -0,0 +1,330 @@
from typing import Self, Sequence, MutableMapping, Mapping
import copy
import logging
import numpy
from numpy import pi
from numpy.typing import ArrayLike
from ..pattern import Pattern
from ..library import ILibrary
from ..error import PortError, BuildError
from ..ports import PortList, Port
from ..abstract import Abstract
from ..utils import SupportsBool
from .tools import Tool
from .utils import ell
from .builder import Builder
logger = logging.getLogger(__name__)
class Pather(Builder):
"""
TODO DOCUMENT Builder
A `Device` is a combination of a `Pattern` with a set of named `Port`s
which can be used to "snap" devices together to make complex layouts.
`Device`s can be as simple as one or two ports (e.g. an electrical pad
or wire), but can also be used to build and represent a large routed
layout (e.g. a logical block with multiple I/O connections or even a
full chip).
For convenience, ports can be read out using square brackets:
- `device['A'] == Port((0, 0), 0)`
- `device[['A', 'B']] == {'A': Port((0, 0), 0), 'B': Port((0, 0), pi)}`
Examples: Creating a Device
===========================
- `Device(pattern, ports={'A': port_a, 'C': port_c})` uses an existing
pattern and defines some ports.
- `Device(ports=None)` makes a new empty pattern with
default ports ('A' and 'B', in opposite directions, at (0, 0)).
- `my_device.build('my_layout')` makes a new pattern and instantiates
`my_device` in it with offset (0, 0) as a base for further building.
- `my_device.as_interface('my_component', port_map=['A', 'B'])` makes a new
(empty) pattern, copies over ports 'A' and 'B' from `my_device`, and
creates additional ports 'in_A' and 'in_B' facing in the opposite
directions. This can be used to build a device which can plug into
`my_device` (using the 'in_*' ports) but which does not itself include
`my_device` as a subcomponent.
Examples: Adding to a Device
============================
- `my_device.plug(subdevice, {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `subdevice` into `my_device`, plugging ports 'A' and 'B'
of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports
are removed and any unconnected ports from `subdevice` are added to
`my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'.
- `my_device.plug(wire, {'myport': 'A'})` places port 'A' of `wire` at 'myport'
of `my_device`. If `wire` has only two ports (e.g. 'A' and 'B'), no `map_out`
argument is provided, and the `inherit_name` argument is not explicitly
set to `False`, the unconnected port of `wire` is automatically renamed to
'myport'. This allows easy extension of existing ports without changing
their names or having to provide `map_out` each time `plug` is called.
- `my_device.place(pad, offset=(10, 10), rotation=pi / 2, port_map={'A': 'gnd'})`
instantiates `pad` at the specified (x, y) offset and with the specified
rotation, adding its ports to those of `my_device`. Port 'A' of `pad` is
renamed to 'gnd' so that further routing can use this signal or net name
rather than the port name on the original `pad` device.
"""
__slots__ = ('tools',)
library: ILibrary
"""
Library from which existing patterns should be referenced, and to which
new ones should be added
"""
tools: dict[str | None, Tool]
"""
Tool objects are used to dynamically generate new single-use Devices
(e.g. wires or waveguides) to be plugged into this device.
"""
def __init__(
self,
library: ILibrary,
*,
pattern: Pattern | None = None,
ports: str | Mapping[str, Port] | None = None,
tools: Tool | MutableMapping[str | None, Tool] | None = None,
name: str | None = None,
) -> None:
"""
# TODO documentation for Builder() constructor
# TODO MOVE THE BELOW DOCS to PortList
# If `ports` is `None`, two default ports ('A' and 'B') are created.
# Both are placed at (0, 0) and have default `ptype`, but 'A' has rotation 0
# (attached devices will be placed to the left) and 'B' has rotation
# pi (attached devices will be placed to the right).
"""
self._dead = False
self.library = library
if pattern is not None:
self.pattern = pattern
else:
self.pattern = Pattern()
if ports is not None:
if self.pattern.ports:
raise BuildError('Ports supplied for pattern with pre-existing ports!')
if isinstance(ports, str):
ports = library.abstract(ports).ports
self.pattern.ports.update(copy.deepcopy(dict(ports)))
if tools is None:
self.tools = {}
elif isinstance(tools, Tool):
self.tools = {None: tools}
else:
self.tools = dict(tools)
if name is not None:
library[name] = self.pattern
@classmethod
def mk(
cls,
library: ILibrary,
name: str,
*,
ports: str | Mapping[str, Port] | None = None,
tools: Tool | MutableMapping[str | None, Tool] | None = None,
) -> tuple[str, 'Pather']:
""" Name-and-make combination """ # TODO document
pather = Pather(library, name=name, ports=ports, tools=tools)
return name, pather
@classmethod
def from_builder(
cls,
builder: Builder,
*,
library: ILibrary | None = None,
tools: Tool | MutableMapping[str | None, Tool] | None = None,
) -> 'Pather':
"""TODO from_builder docs"""
library = library if library is not None else builder.library
if library is None:
raise BuildError('No library available for Pather!')
new = Pather(library=library, tools=tools, pattern=builder.pattern)
return new
@classmethod
def interface(
cls,
source: PortList | Mapping[str, Port] | str,
*,
library: ILibrary | None = None,
tools: Tool | MutableMapping[str | None, Tool] | None = None,
in_prefix: str = 'in_',
out_prefix: str = '',
port_map: dict[str, str] | Sequence[str] | None = None,
name: str | None = None,
) -> 'Pather':
"""
TODO doc pather.interface
"""
if library is None:
if hasattr(source, 'library') and isinstance(source.library, ILibrary):
library = source.library
else:
raise BuildError('No library provided (and not present in `source.library`)')
if tools is None and hasattr(source, 'tools') and isinstance(source.tools, dict):
tools = source.tools
new = Pather.from_builder(
Builder.interface(
source=source,
library=library,
in_prefix=in_prefix,
out_prefix=out_prefix,
port_map=port_map,
name=name,
),
tools=tools,
)
return new
def __repr__(self) -> str:
s = f'<Pather {self.pattern} >' # TODO maybe show lib and tools? in builder repr?
return s
def retool(
self,
tool: Tool,
keys: str | Sequence[str | None] | None = None,
) -> Self:
if keys is None or isinstance(keys, str):
self.tools[keys] = tool
else:
for key in keys:
self.tools[key] = tool
return self
def path(
self,
portspec: str,
ccw: SupportsBool | None,
length: float,
*,
tool_port_names: tuple[str, str] = ('A', 'B'),
base_name: str = '_path',
**kwargs,
) -> Self:
if self._dead:
logger.error('Skipping path() since device is dead')
return self
tool = self.tools.get(portspec, self.tools[None])
in_ptype = self.pattern[portspec].ptype
pat = tool.path(ccw, length, in_ptype=in_ptype, port_names=tool_port_names, **kwargs)
name = self.library.get_name(base_name)
self.library[name] = pat
return self.plug(Abstract(name, pat.ports), {portspec: tool_port_names[0]})
def path_to(
self,
portspec: str,
ccw: SupportsBool | None,
position: float,
*,
tool_port_names: tuple[str, str] = ('A', 'B'),
base_name: str = '_pathto',
**kwargs,
) -> Self:
if self._dead:
logger.error('Skipping path_to() since device is dead')
return self
port = self.pattern[portspec]
x, y = port.offset
if port.rotation is None:
raise PortError(f'Port {portspec} has no rotation and cannot be used for path_to()')
if not numpy.isclose(port.rotation % (pi / 2), 0):
raise BuildError('path_to was asked to route from non-manhattan port')
is_horizontal = numpy.isclose(port.rotation % pi, 0)
if is_horizontal:
if numpy.sign(numpy.cos(port.rotation)) == numpy.sign(position - x):
raise BuildError(f'path_to routing to behind source port: x={x:g} to {position:g}')
length = numpy.abs(position - x)
else:
if numpy.sign(numpy.sin(port.rotation)) == numpy.sign(position - y):
raise BuildError(f'path_to routing to behind source port: y={y:g} to {position:g}')
length = numpy.abs(position - y)
return self.path(portspec, ccw, length, tool_port_names=tool_port_names, base_name=base_name, **kwargs)
def mpath(
self,
portspec: str | Sequence[str],
ccw: SupportsBool | None,
*,
spacing: float | ArrayLike | None = None,
set_rotation: float | None = None,
tool_port_names: tuple[str, str] = ('A', 'B'),
force_container: bool = False,
base_name: str = '_mpath',
**kwargs,
) -> Self:
if self._dead:
logger.error('Skipping mpath() since device is dead')
return self
bound_types = set()
if 'bound_type' in kwargs:
bound_types.add(kwargs['bound_type'])
bound = kwargs['bound']
for bt in ('emin', 'emax', 'pmin', 'pmax', 'min_past_furthest'):
if bt in kwargs:
bound_types.add(bt)
bound = kwargs[bt]
if not bound_types:
raise BuildError('No bound type specified for mpath')
elif len(bound_types) > 1:
raise BuildError(f'Too many bound types specified for mpath: {bound_types}')
bound_type = tuple(bound_types)[0]
if isinstance(portspec, str):
portspec = [portspec]
ports = self.pattern[tuple(portspec)]
extensions = ell(ports, ccw, spacing=spacing, bound=bound, bound_type=bound_type, set_rotation=set_rotation)
if len(ports) == 1 and not force_container:
# Not a bus, so having a container just adds noise to the layout
port_name = tuple(portspec)[0]
return self.path(port_name, ccw, extensions[port_name], tool_port_names=tool_port_names)
else:
bld = Pather.interface(source=ports, library=self.library, tools=self.tools)
for port_name, length in extensions.items():
bld.path(port_name, ccw, length, tool_port_names=tool_port_names)
name = self.library.get_name(base_name)
self.library[name] = bld.pattern
return self.plug(Abstract(name, bld.pattern.ports), {sp: 'in_' + sp for sp in ports.keys()}) # TODO safe to use 'in_'?
# TODO def path_join() and def bus_join()?
def flatten(self) -> Self:
"""
Flatten the contained pattern, using the contained library to resolve references.
Returns:
self
"""
self.pattern.flatten(self.library)
return self
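# --- Illustrative usage (not part of this file) ---
# A rough sketch of driving a Pather, assuming a hypothetical `WireTool`
# implementing the `Tool` interface (masque/builder/tools.py, later in this
# diff) and a pattern that already carries ports named 'opt_in', 'bus0', and
# 'bus1' (e.g. from previously plugged devices). All names are made up.
#
#   lib = Library()
#   name, pather = Pather.mk(lib, 'top_cell', tools={None: WireTool()})
#   ...  # plug devices so that ports 'opt_in', 'bus0', 'bus1' exist
#   pather.path('opt_in', ccw=None, length=100)          # straight segment
#   pather.path('opt_in', ccw=True, length=50)           # left turn
#   pather.path_to('opt_in', ccw=None, position=1_000)   # extend to x = 1000
#   pather.mpath(['bus0', 'bus1'], ccw=False, spacing=5, emin=200)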

View File

@ -1,112 +0,0 @@
"""
Functions for writing port data into a Pattern (`dev2pat`) and retrieving it (`pat2dev`).
These use the format 'name:ptype angle_deg' written into labels, which are placed at
the port locations. This particular approach is just a sensible default; feel free
to write equivalent functions for your own format or alternate storage methods.
"""
from typing import Sequence
import logging
import numpy
from ..pattern import Pattern
from ..label import Label
from ..utils import rotation_matrix_2d, layer_t
from .devices import Device, Port
logger = logging.getLogger(__name__)
def dev2pat(device: Device, layer: layer_t) -> Pattern:
"""
Place a text label at each port location, specifying the port data in the format
'name:ptype angle_deg'
This can be used to debug port locations or to automatically generate ports
when reading in a GDS file.
NOTE that `device` is modified by this function, and `device.pattern` is returned.
Args:
device: The device which is to have its ports labeled. MODIFIED in-place.
layer: The layer on which the labels will be placed.
Returns:
`device.pattern`
"""
for name, port in device.ports.items():
if port.rotation is None:
angle_deg = numpy.inf
else:
angle_deg = numpy.rad2deg(port.rotation)
device.pattern.labels += [
Label(string=f'{name}:{port.ptype} {angle_deg:g}', layer=layer, offset=port.offset)
]
return device.pattern
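# For example, a port named 'in0' with ptype 'optical' and rotation pi
# (180 degrees) produces a label reading 'in0:optical 180' at the port's offset.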
def pat2dev(
pattern: Pattern,
layers: Sequence[layer_t],
max_depth: int = 999_999,
skip_subcells: bool = True,
) -> Device:
"""
Examine `pattern` for labels specifying port info, and use that info
to build a `Device` object.
Labels are assumed to be placed at the port locations, and have the format
'name:ptype angle_deg'
Args:
pattern: Pattern object to scan for labels.
layers: Search for labels on all the given layers.
max_depth: Maximum hierarchy depth to search. Default 999_999.
Reduce this to 0 to avoid ever searching subcells.
skip_subcells: If port labels are found at a given hierarchy level,
do not continue searching at deeper levels. This allows subcells
to contain their own port info (and thus become their own Devices).
Default True.
Returns:
The constructed Device object. Port labels are not removed from the pattern.
"""
ports = {} # Note: could do a list here, if they're not unique
annotated_cells = set()
def find_ports_each(pat, hierarchy, transform, memo) -> Pattern:
if len(hierarchy) > max_depth - 1:
return pat
if skip_subcells and any(parent in annotated_cells for parent in hierarchy):
return pat
labels = [ll for ll in pat.labels if ll.layer in layers]
if len(labels) == 0:
return pat
if skip_subcells:
annotated_cells.add(pat)
mirr_factor = numpy.array((1, -1)) ** transform[3]
rot_matrix = rotation_matrix_2d(transform[2])
for label in labels:
name, property_string = label.string.split(':')
properties = property_string.split(' ')
ptype = properties[0]
angle_deg = float(properties[1]) if len(ptype) else 0
xy_global = transform[:2] + rot_matrix @ (label.offset * mirr_factor)
angle = numpy.deg2rad(angle_deg) * mirr_factor[0] * mirr_factor[1] + transform[2]
if name in ports:
logger.info(f'Duplicate port {name} in pattern {pattern.name}')
ports[name] = Port(offset=xy_global, rotation=angle, ptype=ptype)
return pat
pattern.dfs(visit_before=find_ports_each, transform=True)
return Device(pattern, ports)

View File

@ -0,0 +1,517 @@
from typing import Self, Sequence, Mapping, MutableMapping, Final
import copy
import logging
from collections import defaultdict
import numpy
from numpy import pi
from numpy.typing import ArrayLike
from ..pattern import Pattern
from ..ref import Ref
from ..library import ILibrary
from ..error import PortError, BuildError
from ..ports import PortList, Port
from ..abstract import Abstract
from ..utils import rotation_matrix_2d
from ..utils import SupportsBool
from .tools import Tool, render_step_t
from .utils import ell
from .builder import Builder
logger = logging.getLogger(__name__)
class RenderPather(PortList):
__slots__ = ('pattern', 'library', 'paths', 'tools', '_dead', )
pattern: Pattern
""" Layout of this device """
library: ILibrary | None
""" Library from which patterns should be referenced """
_dead: bool
""" If True, plug()/place() are skipped (for debugging) """
paths: defaultdict[str, list[render_step_t]]
# (op, start_port, dx, dy, out_ptype, tool)
tools: dict[str | None, Tool]
"""
Tool objects are used to dynamically generate new single-use Devices
(e.g. wires or waveguides) to be plugged into this device.
"""
@property
def ports(self) -> dict[str, Port]:
return self.pattern.ports
@ports.setter
def ports(self, value: dict[str, Port]) -> None:
self.pattern.ports = value
def __init__(
self,
library: ILibrary | None = None,
*,
pattern: Pattern | None = None,
ports: str | Mapping[str, Port] | None = None,
tools: Tool | MutableMapping[str | None, Tool] | None = None,
name: str | None = None,
) -> None:
"""
# TODO documentation for RenderPather() constructor
"""
self._dead = False
self.library = library
if pattern is not None:
self.pattern = pattern
else:
self.pattern = Pattern()
if ports is not None:
if self.pattern.ports:
raise BuildError('Ports supplied for pattern with pre-existing ports!')
if isinstance(ports, str):
if library is None:
raise BuildError('Ports given as a string, but `library` was `None`!')
ports = library.abstract(ports).ports
self.pattern.ports.update(copy.deepcopy(dict(ports)))
if name is not None:
if library is None:
raise BuildError('Name was supplied, but no library was given!')
library[name] = self.pattern
if tools is None:
    self.tools = {}
elif isinstance(tools, Tool):
    self.tools = {None: tools}
else:
    self.tools = dict(tools)
self.paths = defaultdict(list)
@classmethod
def interface(
cls,
source: PortList | Mapping[str, Port] | str,
*,
library: ILibrary | None = None,
in_prefix: str = 'in_',
out_prefix: str = '',
port_map: dict[str, str] | Sequence[str] | None = None,
name: str | None = None,
) -> 'RenderPather':
"""
Begin building a new device based on all or some of the ports in the
source device. Do not include the source device; instead use it
to define ports (the "interface") for the new device.
The ports specified by `port_map` (default: all ports) are copied to
new device, and additional (input) ports are created facing in the
opposite directions. The specified `in_prefix` and `out_prefix` are
prepended to the port names to differentiate them.
By default, the flipped ports are given an 'in_' prefix and unflipped
ports keep their original names, enabling intuitive construction of
a device that will "plug into" the current device; the 'in_*' ports
are used for plugging the devices together while the original port
names are used for building the new device.
Another use-case could be to build the new device using the 'in_'
ports, creating a new device which could be used in place of the
current device.
Args:
source: A collection of ports (e.g. Pattern, Builder, or dict)
from which to create the interface.
library: Library from which existing patterns should be referenced,
and to which new ones should be added. If not provided,
the source's library will be used (if available).
in_prefix: Prepended to port names for newly-created ports with
reversed directions compared to the current device.
out_prefix: Prepended to port names for ports which are directly
copied from the current device.
port_map: Specification for ports to copy into the new device:
- If `None`, all ports are copied.
- If a sequence, only the listed ports are copied
- If a mapping, the listed ports (keys) are copied and
renamed (to the values).
Returns:
The new builder, with an empty pattern and 2x as many ports as
listed in port_map.
Raises:
`PortError` if `port_map` contains port names not present in the
current device.
`PortError` if applying the prefixes results in duplicate port
names.
"""
if library is None:
if hasattr(source, 'library') and isinstance(source.library, ILibrary):
library = source.library
if isinstance(source, str):
if library is None:
raise BuildError('Source given as a string, but `library` was `None`!')
orig_ports = library.abstract(source).ports
elif isinstance(source, PortList):
orig_ports = source.ports
elif isinstance(source, dict):
orig_ports = source
else:
raise BuildError(f'Unable to get ports from {type(source)}: {source}')
if port_map:
if isinstance(port_map, dict):
missing_inkeys = set(port_map.keys()) - set(orig_ports.keys())
mapped_ports = {port_map[k]: v for k, v in orig_ports.items() if k in port_map}
else:
port_set = set(port_map)
missing_inkeys = port_set - set(orig_ports.keys())
mapped_ports = {k: v for k, v in orig_ports.items() if k in port_set}
if missing_inkeys:
raise PortError(f'`port_map` keys not present in source: {missing_inkeys}')
else:
mapped_ports = orig_ports
ports_in = {f'{in_prefix}{pname}': port.deepcopy().rotate(pi)
for pname, port in mapped_ports.items()}
ports_out = {f'{out_prefix}{pname}': port.deepcopy()
for pname, port in mapped_ports.items()}
duplicates = set(ports_out.keys()) & set(ports_in.keys())
if duplicates:
raise PortError(f'Duplicate keys after prefixing, try a different prefix: {duplicates}')
new = RenderPather(library=library, ports={**ports_in, **ports_out}, name=name)
return new
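# Sketch of the intended interface() workflow (illustrative only; the cell
# and port names are assumptions):
#
#   # 'inner_cell' has ports 'left' and 'right'
#   outer = RenderPather.interface('inner_cell', library=lib, name='outer_cell')
#   # `outer` now has flipped copies 'in_left'/'in_right' plus 'left'/'right';
#   # build outward from 'left'/'right', then plug the finished 'outer_cell'
#   # onto an instance of 'inner_cell' using the 'in_*' ports.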
def plug(
self,
other: Abstract | str,
map_in: dict[str, str],
map_out: dict[str, str | None] | None = None,
*,
mirrored: tuple[bool, bool] = (False, False),
inherit_name: bool = True,
set_rotation: bool | None = None,
) -> Self:
if self._dead:
logger.error('Skipping plug() since device is dead')
return self
if isinstance(other, str):
if self.library is None:
raise BuildError('No library available, but `other` was a string!')
other = self.library.abstract(other)
# If asked to inherit a name, check that all conditions are met
if (inherit_name
and not map_out
and len(map_in) == 1
and len(other.ports) == 2):
out_port_name = next(iter(set(other.ports.keys()) - set(map_in.values())))
map_out = {out_port_name: next(iter(map_in.keys()))}
if map_out is None:
map_out = {}
map_out = copy.deepcopy(map_out)
self.check_ports(other.ports.keys(), map_in, map_out)
translation, rotation, pivot = self.find_transform(
other,
map_in,
mirrored=mirrored,
set_rotation=set_rotation,
)
# get rid of plugged ports
for ki, vi in map_in.items():
del self.ports[ki]
map_out[vi] = None
if ki in self.paths:
self.paths[ki].append(('P', None, 0.0, 0.0, 'unk', None))
self.place(other, offset=translation, rotation=rotation, pivot=pivot,
mirrored=mirrored, port_map=map_out, skip_port_check=True)
return self
def place(
self,
other: Abstract | str,
*,
offset: ArrayLike = (0, 0),
rotation: float = 0,
pivot: ArrayLike = (0, 0),
mirrored: tuple[bool, bool] = (False, False),
port_map: dict[str, str | None] | None = None,
skip_port_check: bool = False,
) -> Self:
if self._dead:
logger.error('Skipping place() since device is dead')
return self
if isinstance(other, str):
if self.library is None:
raise BuildError('No library available, but `other` was a string!')
other = self.library.abstract(other)
if port_map is None:
port_map = {}
if not skip_port_check:
self.check_ports(other.ports.keys(), map_in=None, map_out=port_map)
ports = {}
for name, port in other.ports.items():
new_name = port_map.get(name, name)
if new_name is None:
continue
ports[new_name] = port
if new_name in self.paths:
self.paths[new_name].append(('P', None, 0.0, 0.0, 'unk', None))
for name, port in ports.items():
p = port.deepcopy()
p.mirror2d(mirrored)
p.rotate_around(pivot, rotation)
p.translate(offset)
self.ports[name] = p
sp = Ref(other.name, mirrored=mirrored)
sp.rotate_around(pivot, rotation)
sp.translate(offset)
self.pattern.refs.append(sp)
return self
def path(
self,
portspec: str,
ccw: SupportsBool | None,
length: float,
**kwargs,
) -> Self:
if self._dead:
logger.error('Skipping path() since device is dead')
return self
port = self.pattern[portspec]
in_ptype = port.ptype
port_rot = port.rotation
assert port_rot is not None # TODO allow manually setting rotation?
tool = self.tools.get(portspec, self.tools[None])
# ask the tool for bend size (fill missing dx or dy), check feasibility, and get out_ptype
bend_radius, out_ptype = tool.planL(ccw, length, in_ptype=in_ptype, **kwargs)
if ccw is None:
bend_run = 0.0
elif bool(ccw):
bend_run = bend_radius
else:
bend_run = -bend_radius
dx, dy = rotation_matrix_2d(port_rot + pi) @ [length, bend_run]
step: Final = ('L', port.deepcopy(), dx, dy, out_ptype, tool)
self.paths[portspec].append(step)
# Update port
port.offset += (dx, dy)
if ccw is not None:
port.rotate((-1 if ccw else 1) * pi / 2)
port.ptype = out_ptype
return self
'''
- record ('path', port, dx, dy, out_ptype, tool)
- to render, ccw = {0: None, 1: True, -1: False}[numpy.sign(dx) * numpy.sign(dy) * (-1 if x_start else 1)]
- length is just dx or dy
- in_ptype and out_ptype are taken directly
- for sbend: dx and dy are maybe reordered (length and jog)
'''
def path_to(
self,
portspec: str,
ccw: SupportsBool | None,
position: float,
**kwargs,
) -> Self:
if self._dead:
logger.error('Skipping path_to() since device is dead')
return self
port = self.pattern[portspec]
x, y = port.offset
if port.rotation is None:
raise PortError(f'Port {portspec} has no rotation and cannot be used for path_to()')
if not numpy.isclose(port.rotation % (pi / 2), 0):
raise BuildError('path_to was asked to route from non-manhattan port')
is_horizontal = numpy.isclose(port.rotation % pi, 0)
if is_horizontal:
if numpy.sign(numpy.cos(port.rotation)) == numpy.sign(position - x):
raise BuildError(f'path_to routing to behind source port: x={x:g} to {position:g}')
length = numpy.abs(position - x)
else:
if numpy.sign(numpy.sin(port.rotation)) == numpy.sign(position - y):
raise BuildError(f'path_to routing to behind source port: y={y:g} to {position:g}')
length = numpy.abs(position - y)
return self.path(portspec, ccw, length, **kwargs)
def mpath(
self,
portspec: str | Sequence[str],
ccw: SupportsBool | None,
*,
spacing: float | ArrayLike | None = None,
set_rotation: float | None = None,
**kwargs,
) -> Self:
if self._dead:
logger.error('Skipping mpath() since device is dead')
return self
bound_types = set()
if 'bound_type' in kwargs:
bound_types.add(kwargs['bound_type'])
bound = kwargs['bound']
for bt in ('emin', 'emax', 'pmin', 'pmax', 'min_past_furthest'):
if bt in kwargs:
bound_types.add(bt)
bound = kwargs[bt]
if not bound_types:
raise BuildError('No bound type specified for mpath')
elif len(bound_types) > 1:
raise BuildError(f'Too many bound types specified for mpath: {bound_types}')
bound_type = tuple(bound_types)[0]
if isinstance(portspec, str):
portspec = [portspec]
ports = self.pattern[tuple(portspec)]
extensions = ell(ports, ccw, spacing=spacing, bound=bound, bound_type=bound_type, set_rotation=set_rotation)
if len(ports) == 1:
# Not a bus, so having a container just adds noise to the layout
port_name = tuple(portspec)[0]
self.path(port_name, ccw, extensions[port_name])
else:
for port_name, length in extensions.items():
self.path(port_name, ccw, length)
return self
def render(self, lib: ILibrary | None = None) -> Self:
lib = lib if lib is not None else self.library
assert lib is not None
tool_port_names = ('A', 'B')
bb = Builder(lib)
for portspec, steps in self.paths.items():
batch: list[render_step_t] = []
for step in steps:
opcode, _start_port, _dx, _dy, _out_ptype, tool = step
appendable_op = opcode in ('L', 'S', 'U')
same_tool = batch and tool == batch[-1][-1]
if batch and (not appendable_op or not same_tool):
# If we can't continue a batch, render it
assert tool is not None
assert batch[0][1] is not None
name = lib << tool.render(batch, port_names=tool_port_names)
bb.ports[portspec] = batch[0][1]
bb.plug(name, {portspec: tool_port_names[0]})
batch = []
# batch was already emptied above if we couldn't continue it
if appendable_op:
batch.append(step)
# Opcodes which break the batch go below this line
if not appendable_op:
del bb.ports[portspec]
if batch:
# A batch didn't end yet
assert tool is not None
assert batch[0][1] is not None
name = lib << tool.render(batch, port_names=tool_port_names)
bb.ports[portspec] = batch[0][1]
bb.plug(name, {portspec: tool_port_names[0]})
bb.ports.clear()
self.pattern.append(bb.pattern)
return self
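# Sketch of the deferred-render workflow (illustrative; `WireTool` is a
# hypothetical Tool and 'sig' an assumed pre-existing port name):
#
#   rp = RenderPather(lib, ports='some_cell', tools={None: WireTool()}, name='routed')
#   rp.path('sig', ccw=None, length=200)   # records a step; no geometry yet
#   rp.path('sig', ccw=True, length=80)
#   rp.render()                            # batch consecutive same-tool steps
#                                          # and instantiate the actual wires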
def translate(self, offset: ArrayLike) -> Self:
"""
Translate the pattern and all ports.
Args:
offset: (x, y) distance to translate by
Returns:
self
"""
self.pattern.translate_elements(offset)
return self
def rotate_around(self, pivot: ArrayLike, angle: float) -> Self:
"""
Rotate the pattern and all ports.
Args:
angle: angle (radians, counterclockwise) to rotate by
pivot: location to rotate around
Returns:
self
"""
self.pattern.rotate_around(pivot, angle)
return self
def mirror(self, axis: int) -> Self:
"""
Mirror the pattern and all ports across the specified axis.
Args:
axis: Axis to mirror across (x=0, y=1)
Returns:
self
"""
self.pattern.mirror(axis)
return self
def set_dead(self) -> Self:
"""
Disallows further changes through `plug()` or `place()`.
This is meant for debugging:
```
dev.plug(a, ...)
dev.set_dead() # added for debug purposes
dev.plug(b, ...) # usually raises an error, but now skipped
dev.plug(c, ...) # also skipped
dev.pattern.visualize() # shows the device as of the set_dead() call
```
Returns:
self
"""
self._dead = True
return self
def __repr__(self) -> str:
s = f'<RenderPather {self.pattern} >' # TODO maybe show lib and tools? in builder repr?
return s

158
masque/builder/tools.py Normal file
View File

@ -0,0 +1,158 @@
"""
Tools are objects which dynamically generate simple single-use devices (e.g. wires or waveguides)
"""
from typing import TYPE_CHECKING, Sequence, Literal, Callable
from abc import ABCMeta, abstractmethod
import numpy
from ..utils import SupportsBool, rotation_matrix_2d
from ..ports import Port
from ..pattern import Pattern
from ..abstract import Abstract
from ..library import ILibrary, Library
from ..error import BuildError
from .builder import Builder
render_step_t = (
tuple[Literal['L', 'S', 'U'], Port, float, float, str, 'Tool']
| tuple[Literal['P'], None, float, float, str, None]
)
class Tool:
def path(
self,
ccw: SupportsBool | None,
length: float,
*,
in_ptype: str | None = None,
out_ptype: str | None = None,
port_names: tuple[str, str] = ('A', 'B'),
**kwargs,
) -> Pattern:
raise NotImplementedError(f'path() not implemented for {type(self)}')
def planL(
self,
ccw: SupportsBool | None,
length: float,
*,
in_ptype: str | None = None,
out_ptype: str | None = None,
**kwargs,
) -> tuple[float, str]:
raise NotImplementedError(f'planL() not implemented for {type(self)}')
def planS(
self,
ccw: SupportsBool | None,
length: float,
jog: float,
*,
in_ptype: str | None = None,
out_ptype: str | None = None,
**kwargs,
) -> str: # out_ptype only?
raise NotImplementedError(f'planS() not implemented for {type(self)}')
def render(
self,
batch: Sequence[render_step_t],
*,
in_ptype: str | None = None,
out_ptype: str | None = None,
port_names: Sequence[str] = ('A', 'B'),
**kwargs,
) -> ILibrary:
assert batch[0][-1] == self
raise NotImplementedError(f'render() not implemented for {type(self)}')
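# A minimal concrete Tool, sketched here for illustration only (not part of
# this module). It wraps a user-supplied `make_trace(length, port_names)`
# callable which is assumed to return a straight two-port Pattern; bends are
# rejected, and the 'trace' ptype is an arbitrary placeholder.
#
#   class StraightTrace(Tool):
#       def __init__(self, make_trace):
#           self.make_trace = make_trace
#
#       def planL(self, ccw, length, *, in_ptype=None, out_ptype=None, **kwargs):
#           if ccw is not None:
#               raise BuildError('StraightTrace cannot make bends')
#           return 0.0, 'trace'        # (bend_radius, out_ptype)
#
#       def path(self, ccw, length, *, in_ptype=None, out_ptype=None,
#                port_names=('A', 'B'), **kwargs):
#           if ccw is not None:
#               raise BuildError('StraightTrace cannot make bends')
#           return self.make_trace(length, port_names=port_names)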
class BasicTool(Tool, metaclass=ABCMeta):
straight: tuple[Callable[[float], Pattern], str, str]
bend: tuple[Abstract, str, str] # Assumed to be clockwise
transitions: dict[str, tuple[Abstract, str, str]]
def path(
self,
ccw: SupportsBool | None,
length: float,
*,
in_ptype: str | None = None,
out_ptype: str | None = None,
port_names: tuple[str, str] = ('A', 'B'),
**kwargs,
) -> Pattern:
# TODO check all the math for L-shaped bends
straight_length = length
bend_run = 0
if ccw is not None:
bend, bport_in, bport_out = self.bend
brot = bend.ports[bport_in].rotation
assert brot is not None
bend_dxy = numpy.abs(
rotation_matrix_2d(-brot) @ (
bend.ports[bport_out].offset
- bend.ports[bport_in].offset
)
)
straight_length -= bend_dxy[0]
bend_run += bend_dxy[1]
else:
bend_dxy = numpy.zeros(2)
in_transition = self.transitions.get('unk' if in_ptype is None else in_ptype, None)
if in_transition is not None:
ipat, iport_theirs, iport_ours = in_transition
irot = ipat.ports[iport_theirs].rotation
assert irot is not None
itrans_dxy = rotation_matrix_2d(-irot) @ (
ipat.ports[iport_ours].offset
- ipat.ports[iport_theirs].offset
)
straight_length -= itrans_dxy[0]
bend_run += itrans_dxy[1]
else:
itrans_dxy = numpy.zeros(2)
out_transition = self.transitions.get('unk' if out_ptype is None else out_ptype, None)
if out_transition is not None:
opat, oport_theirs, oport_ours = out_transition
orot = opat.ports[oport_ours].rotation
assert orot is not None
otrans_dxy = rotation_matrix_2d(-orot) @ (
opat.ports[oport_theirs].offset
- opat.ports[oport_ours].offset
)
if ccw:
otrans_dxy[0] *= -1
straight_length -= otrans_dxy[1]
bend_run += otrans_dxy[0]
else:
otrans_dxy = numpy.zeros(2)
if straight_length < 0:
raise BuildError(f'Asked to draw path with total length {length:g}, shorter than required bends and tapers:\n'
f'bend: {bend_dxy[0]:g} in_taper: {abs(itrans_dxy[0])} out_taper: {otrans_dxy[1]}')
gen_straight, sport_in, sport_out = self.straight
tree = Library()
bb = Builder(library=tree, name='_path').add_port_pair(names=port_names)
if in_transition:
bb.plug(ipat, {port_names[1]: iport_theirs})
if not numpy.isclose(straight_length, 0):
straight = tree << {'_straight': gen_straight(straight_length)}
bb.plug(straight, {port_names[1]: sport_in})
if ccw is not None:
bb.plug(bend, {port_names[1]: bport_in}, mirrored=(False, bool(ccw)))
if out_transition:
bb.plug(opat, {port_names[1]: oport_ours})
return bb.pattern
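# Note on the assembly above: the segment is built by plugging, in order, the
# input transition (when the incoming ptype needs one), the straight section
# (skipped when its remaining length is ~0), the bend (mirrored to select CCW
# vs the clockwise default), and finally the output transition, each onto the
# running end of the path (`port_names[1]`). The subtractions earlier keep the
# total along-axis extent equal to the requested `length`.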

View File

@ -1,26 +1,26 @@
from typing import Dict, Tuple, List, Optional, Union, Any, cast, Sequence, TYPE_CHECKING from typing import Mapping, Sequence, SupportsFloat, cast, TYPE_CHECKING
from pprint import pformat from pprint import pformat
import numpy import numpy
from numpy import pi from numpy import pi
from numpy.typing import ArrayLike from numpy.typing import ArrayLike, NDArray
from ..utils import rotation_matrix_2d from ..utils import rotation_matrix_2d, SupportsBool
from ..error import BuildError from ..error import BuildError
if TYPE_CHECKING: if TYPE_CHECKING:
from .devices import Port from ..ports import Port
def ell( def ell(
ports: Dict[str, 'Port'], ports: Mapping[str, 'Port'],
ccw: Optional[bool], ccw: SupportsBool | None,
bound_type: str, bound_type: str,
bound: Union[float, ArrayLike], bound: float | ArrayLike,
*, *,
spacing: Optional[Union[float, ArrayLike]] = None, spacing: float | ArrayLike | None = None,
set_rotation: Optional[float] = None, set_rotation: float | None = None,
) -> Dict[str, float]: ) -> dict[str, float]:
""" """
Calculate extension for each port in order to build a 90-degree bend with the provided Calculate extension for each port in order to build a 90-degree bend with the provided
channel spacing: channel spacing:
@ -135,6 +135,7 @@ def ell(
# D-----------| `d_to_align[3]` # D-----------| `d_to_align[3]`
# #
d_to_align = x_start.max() - x_start # distance to travel to align all d_to_align = x_start.max() - x_start # distance to travel to align all
offsets: NDArray[numpy.float64]
if bound_type == 'min_past_furthest': if bound_type == 'min_past_furthest':
# A------------------V `d_to_exit[0]` # A------------------V `d_to_exit[0]`
# B-----V `d_to_exit[1]` # B-----V `d_to_exit[1]`
@ -154,6 +155,7 @@ def ell(
travel = d_to_align - (ch_offsets.max() - ch_offsets) travel = d_to_align - (ch_offsets.max() - ch_offsets)
offsets = travel - travel.min().clip(max=0) offsets = travel - travel.min().clip(max=0)
rot_bound: SupportsFloat
if bound_type in ('emin', 'min_extension', if bound_type in ('emin', 'min_extension',
'emax', 'max_extension', 'emax', 'max_extension',
'min_past_furthest',): 'min_past_furthest',):
@ -189,7 +191,8 @@ def ell(
offsets += extension offsets += extension
if extension < 0: if extension < 0:
raise BuildError(f'Position is too close by at least {-numpy.floor(extension)}. Total extensions would be' ext_floor = -numpy.floor(extension)
raise BuildError(f'Position is too close by at least {ext_floor}. Total extensions would be\n\t'
+ '\n\t'.join(f'{key}: {off}' for key, off in zip(ports.keys(), offsets))) + '\n\t'.join(f'{key}: {off}' for key, off in zip(ports.keys(), offsets)))
result = dict(zip(ports.keys(), offsets)) result = dict(zip(ports.keys(), offsets))

View File

@ -11,13 +11,6 @@ class PatternError(MasqueError):
""" """
pass pass
class PatternLockedError(PatternError):
"""
Exception raised when trying to modify a locked pattern
"""
def __init__(self):
PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape')
class LibraryError(MasqueError): class LibraryError(MasqueError):
""" """
@ -26,22 +19,21 @@ class LibraryError(MasqueError):
pass pass
class DeviceLibraryError(MasqueError):
"""
Exception raised by DeviceLibrary classes
"""
pass
class DeviceError(MasqueError):
"""
Exception raised by Device and Port objects
"""
pass
class BuildError(MasqueError): class BuildError(MasqueError):
""" """
Exception raised by builder-related functions Exception raised by builder-related functions
""" """
pass pass
class PortError(MasqueError):
"""
Exception raised by port-related functions
"""
pass
class OneShotError(MasqueError):
"""
Exception raised when a function decorated with `@oneshot` is called more than once
"""
def __init__(self, func_name: str) -> None:
Exception.__init__(self, f'Function "{func_name}" with @oneshot was called more than once')

View File

@ -1,20 +1,25 @@
""" """
DXF file format readers and writers DXF file format readers and writers
Notes:
* Gzip modification time is set to 0 (start of current epoch, usually 1970-01-01)
* ezdxf sets creation time, write time, $VERSIONGUID, and $FINGERPRINTGUID
to unique values, so byte-for-byte reproducibility is not achievable for now
""" """
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable from typing import Any, Callable, Mapping, cast, TextIO, IO
import re
import io import io
import base64
import struct
import logging import logging
import pathlib import pathlib
import gzip import gzip
import numpy # type: ignore import numpy
import ezdxf # type: ignore import ezdxf
from ezdxf.enums import TextEntityAlignment
from .. import Pattern, SubPattern, PatternError, Label, Shape from .utils import is_gzipped, tmpfile
from ..shapes import Polygon, Path from .. import Pattern, Ref, PatternError, Label
from ..library import ILibraryView, LibraryView, Library
from ..shapes import Shape, Polygon, Path
from ..repetition import Grid from ..repetition import Grid
from ..utils import rotation_matrix_2d, layer_t from ..utils import rotation_matrix_2d, layer_t
@ -22,24 +27,23 @@ from ..utils import rotation_matrix_2d, layer_t
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.warning('DXF support is experimental and only slightly tested!') logger.warning('DXF support is experimental!')
DEFAULT_LAYER = 'DEFAULT' DEFAULT_LAYER = 'DEFAULT'
def write( def write(
pattern: Pattern, library: Mapping[str, Pattern], # TODO could allow library=None for flat DXF
stream: io.TextIOBase, top_name: str,
stream: TextIO,
*, *,
modify_originals: bool = False,
dxf_version='AC1024', dxf_version='AC1024',
disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
) -> None: ) -> None:
""" """
Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes
into polygons, and then writing patterns as DXF `Block`s, polygons as `LWPolyline`s, into polygons, and then writing patterns as DXF `Block`s, polygons as `LWPolyline`s,
and subpatterns as `Insert`s. and refs as `Insert`s.
The top level pattern's name is not written to the DXF file. Nested patterns keep their The top level pattern's name is not written to the DXF file. Nested patterns keep their
names. names.
@ -49,60 +53,61 @@ def write(
tuple: (1, 2) -> '1.2' tuple: (1, 2) -> '1.2'
str: '1.2' -> '1.2' (no change) str: '1.2' -> '1.2' (no change)
It is often a good idea to run `pattern.subpatternize()` prior to calling this function, DXF does not support shape repetition (only block repetition). Please call
especially if calling `.polygonize()` will result in very many vertices. library.wrap_repeated_shapes() before writing to file.
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` Other functions you may want to call:
prior to calling this function. - `masque.file.oasis.check_valid_names(library.keys())` to check for invalid names
- `library.dangling_refs()` to check for references to missing patterns
- `pattern.polygonize()` for any patterns with shapes other
than `masque.shapes.Polygon` or `masque.shapes.Path`
Only `Grid` repetition objects with manhattan basis vectors are preserved as arrays. Since DXF Only `Grid` repetition objects with manhattan basis vectors are preserved as arrays. Since DXF
rotations apply to basis vectors while `masque`'s rotations do not, the basis vectors of an rotations apply to basis vectors while `masque`'s rotations do not, the basis vectors of an
array with rotated instances must be manhattan _after_ having a compensating rotation applied. array with rotated instances must be manhattan _after_ having a compensating rotation applied.
Args: Args:
patterns: A Pattern or list of patterns to write to the stream. library: A {name: Pattern} mapping of patterns. Only `top_name` and patterns referenced
by it are written.
top_name: Name of the top-level pattern to write.
stream: Stream object to write to. stream: Stream object to write to.
modify_original: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`.
WARNING: No additional error checking is performed on the results.
""" """
#TODO consider supporting DXF arcs? #TODO consider supporting DXF arcs?
if disambiguate_func is None: if not isinstance(library, ILibraryView):
disambiguate_func = lambda pats: disambiguate_pattern_names(pats) if isinstance(library, dict):
assert(disambiguate_func is not None) library = LibraryView(library)
else:
library = LibraryView(dict(library))
if not modify_originals: pattern = library[top_name]
pattern = pattern.deepcopy().deepunlock() subtree = library.subtree(top_name)
# Get a dict of id(pattern) -> pattern
patterns_by_id = pattern.referenced_patterns_by_id()
disambiguate_func(patterns_by_id.values())
# Create library # Create library
lib = ezdxf.new(dxf_version, setup=True) lib = ezdxf.new(dxf_version, setup=True)
msp = lib.modelspace() msp = lib.modelspace()
_shapes_to_elements(msp, pattern.shapes) _shapes_to_elements(msp, pattern.shapes)
_labels_to_texts(msp, pattern.labels) _labels_to_texts(msp, pattern.labels)
_subpatterns_to_refs(msp, pattern.subpatterns) _mrefs_to_drefs(msp, pattern.refs)
# Now create a block for each referenced pattern, and add in any shapes # Now create a block for each referenced pattern, and add in any shapes
for pat in patterns_by_id.values(): for name, pat in subtree.items():
assert(pat is not None) assert pat is not None
block = lib.blocks.new(name=pat.name) if name == top_name:
continue
block = lib.blocks.new(name=name)
_shapes_to_elements(block, pat.shapes) _shapes_to_elements(block, pat.shapes)
_labels_to_texts(block, pat.labels) _labels_to_texts(block, pat.labels)
_subpatterns_to_refs(block, pat.subpatterns) _mrefs_to_drefs(block, pat.refs)
lib.write(stream) lib.write(stream)
def writefile( def writefile(
pattern: Pattern, library: Mapping[str, Pattern],
filename: Union[str, pathlib.Path], top_name: str,
filename: str | pathlib.Path,
*args, *args,
**kwargs, **kwargs,
) -> None: ) -> None:
@ -112,30 +117,42 @@ def writefile(
Will automatically compress the file if it has a .gz suffix. Will automatically compress the file if it has a .gz suffix.
Args: Args:
pattern: `Pattern` to save library: A {name: Pattern} mapping of patterns. Only `top_name` and patterns referenced
by it are written.
top_name: Name of the top-level pattern to write.
filename: Filename to save to. filename: Filename to save to.
*args: passed to `dxf.write` *args: passed to `dxf.write`
**kwargs: passed to `dxf.write` **kwargs: passed to `dxf.write`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz':
open_func: Callable = gzip.open
else:
open_func = open
with open_func(path, mode='wt') as stream: gz_stream: IO[bytes]
write(pattern, stream, *args, **kwargs) with tmpfile(path) as base_stream:
streams: tuple[Any, ...] = (base_stream,)
if path.suffix == '.gz':
gz_stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
streams = (gz_stream,) + streams
else:
gz_stream = base_stream
stream = io.TextIOWrapper(gz_stream) # type: ignore
streams = (stream,) + streams
try:
write(library, top_name, stream, *args, **kwargs)
finally:
for ss in streams:
ss.close()
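# Example (illustrative): write the top cell 'TOP' from a {name: Pattern}
# mapping `lib` to a gzipped DXF file:
#
#   from masque.file import dxf
#   dxf.writefile(lib, 'TOP', 'layout.dxf.gz')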
def readfile( def readfile(
filename: Union[str, pathlib.Path], filename: str | pathlib.Path,
*args, *args,
**kwargs, **kwargs,
) -> Tuple[Pattern, Dict[str, Any]]: ) -> tuple[Library, dict[str, Any]]:
""" """
Wrapper for `dxf.read()` that takes a filename or path instead of a stream. Wrapper for `dxf.read()` that takes a filename or path instead of a stream.
Will automatically decompress files with a .gz suffix. Will automatically decompress gzipped files.
Args: Args:
filename: Filename to save to. filename: Filename to save to.
@ -143,7 +160,7 @@ def readfile(
**kwargs: passed to `dxf.read` **kwargs: passed to `dxf.read`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz': if is_gzipped(path):
open_func: Callable = gzip.open open_func: Callable = gzip.open
else: else:
open_func = open open_func = open
@ -154,21 +171,17 @@ def readfile(
def read( def read(
stream: io.TextIOBase, stream: TextIO,
clean_vertices: bool = True, ) -> tuple[Library, dict[str, Any]]:
) -> Tuple[Pattern, Dict[str, Any]]:
""" """
Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are
translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s
are translated into `SubPattern` objects. are translated into `Ref` objects.
If an object has no layer it is set to this module's `DEFAULT_LAYER` ("DEFAULT"). If an object has no layer it is set to this module's `DEFAULT_LAYER` ("DEFAULT").
Args: Args:
stream: Stream to read from. stream: Stream to read from.
clean_vertices: If `True`, remove any redundant vertices when loading polygons.
The cleaning process removes any polygons with zero area or <3 vertices.
Default `True`.
Returns: Returns:
- Top level pattern - Top level pattern
@ -176,26 +189,24 @@ def read(
lib = ezdxf.read(stream) lib = ezdxf.read(stream)
msp = lib.modelspace() msp = lib.modelspace()
pat = _read_block(msp, clean_vertices) top_name, top_pat = _read_block(msp)
patterns = [pat] + [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space'] mlib = Library({top_name: top_pat})
for bb in lib.blocks:
if bb.name == '*Model_Space':
continue
name, pat = _read_block(bb)
mlib[name] = pat
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries library_info = dict(
# according to the subpattern.identifier (which is deleted after use). layers=[ll.dxfattribs() for ll in lib.layers],
patterns_dict = dict(((p.name, p) for p in patterns)) )
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0]]
del sp.identifier
library_info = { return mlib, library_info
'layers': [ll.dxfattribs() for ll in lib.layers]
}
return pat, library_info
def _read_block(block, clean_vertices: bool) -> Pattern: def _read_block(block) -> tuple[str, Pattern]:
pat = Pattern(block.name) name = block.name
pat = Pattern()
for element in block: for element in block:
eltype = element.dxftype() eltype = element.dxftype()
if eltype in ('POLYLINE', 'LWPOLYLINE'): if eltype in ('POLYLINE', 'LWPOLYLINE'):
@ -219,24 +230,19 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
if width == 0: if width == 0:
width = attr.get('const_width', 0) width = attr.get('const_width', 0)
shape: Union[Path, Polygon] shape: Path | Polygon
if width == 0 and len(points) > 2 and numpy.array_equal(points[0], points[-1]): if width == 0 and len(points) > 2 and numpy.array_equal(points[0], points[-1]):
shape = Polygon(layer=layer, vertices=points[:-1, :2]) shape = Polygon(layer=layer, vertices=points[:-1, :2])
else: else:
shape = Path(layer=layer, width=width, vertices=points[:, :2]) shape = Path(layer=layer, width=width, vertices=points[:, :2])
if clean_vertices:
try:
shape.clean_vertices()
except PatternError:
continue
pat.shapes.append(shape) pat.shapes.append(shape)
elif eltype in ('TEXT',): elif eltype in ('TEXT',):
args = {'offset': numpy.array(element.get_pos()[1])[:2], args = dict(
'layer': element.dxfattribs().get('layer', DEFAULT_LAYER), offset=numpy.array(element.get_pos()[1])[:2],
} layer=element.dxfattribs().get('layer', DEFAULT_LAYER),
)
string = element.dxfattribs().get('text', '') string = element.dxfattribs().get('text', '')
# height = element.dxfattribs().get('height', 0) # height = element.dxfattribs().get('height', 0)
# if height != 0: # if height != 0:
@ -257,82 +263,89 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
offset = numpy.array(attr.get('insert', (0, 0, 0)))[:2] offset = numpy.array(attr.get('insert', (0, 0, 0)))[:2]
args = { args = dict(
'offset': offset, target=attr.get('name', None),
'scale': scale, offset=offset,
'mirrored': mirrored, scale=scale,
'rotation': rotation, mirrored=mirrored,
'pattern': None, rotation=rotation,
'identifier': (attr.get('name', None),), )
}
if 'column_count' in attr: if 'column_count' in attr:
args['repetition'] = Grid(a_vector=(attr['column_spacing'], 0), args['repetition'] = Grid(
a_vector=(attr['column_spacing'], 0),
b_vector=(0, attr['row_spacing']), b_vector=(0, attr['row_spacing']),
a_count=attr['column_count'], a_count=attr['column_count'],
b_count=attr['row_count']) b_count=attr['row_count'],
pat.subpatterns.append(SubPattern(**args)) )
pat.ref(**args)
else: else:
logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).') logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).')
return pat return name, pat
def _subpatterns_to_refs( def _mrefs_to_drefs(
block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], block: ezdxf.layouts.BlockLayout | ezdxf.layouts.Modelspace,
subpatterns: List[SubPattern], refs: list[Ref],
) -> None: ) -> None:
for subpat in subpatterns: for ref in refs:
if subpat.pattern is None: if ref.target is None:
continue continue
encoded_name = subpat.pattern.name encoded_name = ref.target
rotation = (subpat.rotation * 180 / numpy.pi) % 360 rotation = numpy.rad2deg(ref.rotation) % 360
attribs = { attribs = dict(
'xscale': subpat.scale * (-1 if subpat.mirrored[1] else 1), xscale=ref.scale * (-1 if ref.mirrored[1] else 1),
'yscale': subpat.scale * (-1 if subpat.mirrored[0] else 1), yscale=ref.scale * (-1 if ref.mirrored[0] else 1),
'rotation': rotation, rotation=rotation,
} )
rep = subpat.repetition rep = ref.repetition
if rep is None: if rep is None:
block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs)
elif isinstance(rep, Grid): elif isinstance(rep, Grid):
a = rep.a_vector a = rep.a_vector
b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2) b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2)
rotated_a = rotation_matrix_2d(-subpat.rotation) @ a rotated_a = rotation_matrix_2d(-ref.rotation) @ a
rotated_b = rotation_matrix_2d(-subpat.rotation) @ b rotated_b = rotation_matrix_2d(-ref.rotation) @ b
if rotated_a[1] == 0 and rotated_b[0] == 0: if rotated_a[1] == 0 and rotated_b[0] == 0:
attribs['column_count'] = rep.a_count attribs['column_count'] = rep.a_count
attribs['row_count'] = rep.b_count attribs['row_count'] = rep.b_count
attribs['column_spacing'] = rotated_a[0] attribs['column_spacing'] = rotated_a[0]
attribs['row_spacing'] = rotated_b[1] attribs['row_spacing'] = rotated_b[1]
block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs)
elif rotated_a[0] == 0 and rotated_b[1] == 0: elif rotated_a[0] == 0 and rotated_b[1] == 0:
attribs['column_count'] = rep.b_count attribs['column_count'] = rep.b_count
attribs['row_count'] = rep.a_count attribs['row_count'] = rep.a_count
attribs['column_spacing'] = rotated_b[0] attribs['column_spacing'] = rotated_b[0]
attribs['row_spacing'] = rotated_a[1] attribs['row_spacing'] = rotated_a[1]
block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs)
else: else:
#NOTE: We could still do non-manhattan (but still orthogonal) grids by getting #NOTE: We could still do non-manhattan (but still orthogonal) grids by getting
# creative with counter-rotated nested patterns, but probably not worth it. # creative with counter-rotated nested patterns, but probably not worth it.
# Instead, just break apart the grid into individual elements: # Instead, just break apart the grid into individual elements:
for dd in rep.displacements: for dd in rep.displacements:
block.add_blockref(encoded_name, subpat.offset + dd, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset + dd, dxfattribs=attribs)
else: else:
for dd in rep.displacements: for dd in rep.displacements:
block.add_blockref(encoded_name, subpat.offset + dd, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset + dd, dxfattribs=attribs)
def _shapes_to_elements( def _shapes_to_elements(
block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], block: ezdxf.layouts.BlockLayout | ezdxf.layouts.Modelspace,
shapes: List[Shape], shapes: list[Shape],
polygonize_paths: bool = False, polygonize_paths: bool = False,
) -> None: ) -> None:
# Add `LWPolyline`s for each shape. # Add `LWPolyline`s for each shape.
# Could set do paths with width setting, but need to consider endcaps. # Could set do paths with width setting, but need to consider endcaps.
for shape in shapes: for shape in shapes:
attribs = {'layer': _mlayer2dxf(shape.layer)} if shape.repetition is not None:
raise PatternError(
'Shape repetitions are not supported by DXF.'
' Please call library.wrap_repeated_shapes() before writing to file.'
)
attribs = dict(layer=_mlayer2dxf(shape.layer))
for polygon in shape.to_polygons(): for polygon in shape.to_polygons():
xy_open = polygon.vertices + polygon.offset xy_open = polygon.vertices + polygon.offset
xy_closed = numpy.vstack((xy_open, xy_open[0, :])) xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
@ -340,13 +353,13 @@ def _shapes_to_elements(
def _labels_to_texts( def _labels_to_texts(
block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], block: ezdxf.layouts.BlockLayout | ezdxf.layouts.Modelspace,
labels: List[Label], labels: list[Label],
) -> None: ) -> None:
for label in labels: for label in labels:
attribs = {'layer': _mlayer2dxf(label.layer)} attribs = dict(layer=_mlayer2dxf(label.layer))
xy = label.offset xy = label.offset
block.add_text(label.string, dxfattribs=attribs).set_pos(xy, align='BOTTOM_LEFT') block.add_text(label.string, dxfattribs=attribs).set_placement(xy, align=TextEntityAlignment.BOTTOM_LEFT)
def _mlayer2dxf(layer: layer_t) -> str: def _mlayer2dxf(layer: layer_t) -> str:
@ -357,40 +370,3 @@ def _mlayer2dxf(layer: layer_t) -> str:
if isinstance(layer, tuple): if isinstance(layer, tuple):
return f'{layer[0]}.{layer[1]}' return f'{layer[0]}.{layer[1]}'
raise PatternError(f'Unknown layer type: {layer} ({type(layer)})') raise PatternError(f'Unknown layer type: {layer} ({type(layer)})')
def disambiguate_pattern_names(
patterns: Iterable[Pattern],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
) -> None:
used_names = []
for pat in patterns:
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
i += 1
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize,\n originally "{pat.name}"')
if len(suffixed_name) > max_name_length:
raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)

View File

@ -16,31 +16,30 @@ Notes:
* PLEX is not supported * PLEX is not supported
* ELFLAGS are not supported * ELFLAGS are not supported
* GDS does not support library- or structure-level annotations * GDS does not support library- or structure-level annotations
* Creation/modification/access times are set to 1900-01-01 for reproducibility. * GDS creation/modification/access times are set to 1900-01-01 for reproducibility.
* Gzip modification time is set to 0 (start of current epoch, usually 1970-01-01)
""" """
from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional from typing import Callable, Iterable, Mapping, IO, cast, Any
from typing import Sequence, BinaryIO
import re
import io import io
import mmap import mmap
import copy
import base64
import struct
import logging import logging
import pathlib import pathlib
import gzip import gzip
import string
from pprint import pformat
import numpy import numpy
from numpy.typing import NDArray, ArrayLike from numpy.typing import ArrayLike, NDArray
import klamath import klamath
from klamath import records from klamath import records
from .utils import is_gzipped from .utils import is_gzipped, tmpfile
from .. import Pattern, SubPattern, PatternError, Label, Shape from .. import Pattern, Ref, PatternError, LibraryError, Label, Shape
from ..shapes import Polygon, Path from ..shapes import Polygon, Path
from ..repetition import Grid from ..repetition import Grid
from ..utils import layer_t, normalize_mirror, annotations_t from ..utils import layer_t, normalize_mirror, annotations_t
from ..library import Library from ..library import LazyLibrary, Library, ILibrary, ILibraryView
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -53,20 +52,21 @@ path_cap_map = {
} }
def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
return numpy.rint(val).astype(numpy.int32)
def write( def write(
patterns: Union[Pattern, Sequence[Pattern]], library: Mapping[str, Pattern],
stream: BinaryIO, stream: IO[bytes],
meters_per_unit: float, meters_per_unit: float,
logical_units_per_unit: float = 1, logical_units_per_unit: float = 1,
library_name: str = 'masque-klamath', library_name: str = 'masque-klamath',
*,
modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
) -> None: ) -> None:
""" """
Convert a `Pattern` or list of patterns to a GDSII stream, and then mapping data as follows: Convert a library to a GDSII stream, mapping data as follows:
Pattern -> GDSII structure Pattern -> GDSII structure
SubPattern -> GDSII SREF or AREF Ref -> GDSII SREF or AREF
Path -> GDSII path Path -> GDSII path
Shape (other than path) -> GDSII boundary/ies Shape (other than path) -> GDSII boundary/ies
Label -> GDSII text Label -> GDSII text
@ -78,14 +78,17 @@ def write(
datatype is chosen to be `shape.layer[1]` if available, datatype is chosen to be `shape.layer[1]` if available,
otherwise `0` otherwise `0`
It is often a good idea to run `pattern.subpatternize()` prior to calling this function, GDS does not support shape repetition (only cell repetition). Please call
especially if calling `.polygonize()` will result in very many vertices. `library.wrap_repeated_shapes()` before writing to file.
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` Other functions you may want to call:
prior to calling this function. - `masque.file.gdsii.check_valid_names(library.keys())` to check for invalid names
- `library.dangling_refs()` to check for references to missing patterns
- `pattern.polygonize()` for any patterns with shapes other
than `masque.shapes.Polygon` or `masque.shapes.Path`
Args: Args:
patterns: A Pattern or list of patterns to convert. library: A {name: Pattern} mapping of patterns to write.
meters_per_unit: Written into the GDSII file, meters per (database) length unit. meters_per_unit: Written into the GDSII file, meters per (database) length unit.
All distances are assumed to be an integer multiple of this unit, and are stored as such. All distances are assumed to be an integer multiple of this unit, and are stored as such.
logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
@ -93,54 +96,35 @@ def write(
Default `1`. Default `1`.
library_name: Library name written into the GDSII file. library_name: Library name written into the GDSII file.
Default 'masque-klamath'. Default 'masque-klamath'.
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard as well as possible.
WARNING: No additional error checking is performed on the results.
""" """
if isinstance(patterns, Pattern): if not isinstance(library, ILibrary):
patterns = [patterns] if isinstance(library, dict):
library = Library(library)
if disambiguate_func is None: else:
disambiguate_func = disambiguate_pattern_names # type: ignore library = Library(dict(library))
assert(disambiguate_func is not None) # placate mypy
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
patterns = [p.wrap_repeated_shapes() for p in patterns]
# Create library # Create library
header = klamath.library.FileHeader(name=library_name.encode('ASCII'), header = klamath.library.FileHeader(
name=library_name.encode('ASCII'),
user_units_per_db_unit=logical_units_per_unit, user_units_per_db_unit=logical_units_per_unit,
meters_per_db_unit=meters_per_unit) meters_per_db_unit=meters_per_unit,
)
header.write(stream) header.write(stream)
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
# Now create a structure for each pattern, and add in any Boundary and SREF elements # Now create a structure for each pattern, and add in any Boundary and SREF elements
for pat in patterns_by_id.values(): for name, pat in library.items():
elements: List[klamath.elements.Element] = [] elements: list[klamath.elements.Element] = []
elements += _shapes_to_elements(pat.shapes) elements += _shapes_to_elements(pat.shapes)
elements += _labels_to_texts(pat.labels) elements += _labels_to_texts(pat.labels)
elements += _subpatterns_to_refs(pat.subpatterns) elements += _mrefs_to_grefs(pat.refs)
klamath.library.write_struct(stream, name=pat.name.encode('ASCII'), elements=elements) klamath.library.write_struct(stream, name=name.encode('ASCII'), elements=elements)
records.ENDLIB.write(stream, None) records.ENDLIB.write(stream, None)
def writefile( def writefile(
patterns: Union[Sequence[Pattern], Pattern], library: Mapping[str, Pattern],
filename: Union[str, pathlib.Path], filename: str | pathlib.Path,
*args, *args,
**kwargs, **kwargs,
) -> None: ) -> None:
@ -150,26 +134,33 @@ def writefile(
Will automatically compress the file if it has a .gz suffix. Will automatically compress the file if it has a .gz suffix.
Args: Args:
patterns: `Pattern` or list of patterns to save library: {name: Pattern} pairs to save.
filename: Filename to save to. filename: Filename to save to.
*args: passed to `write()` *args: passed to `write()`
**kwargs: passed to `write()` **kwargs: passed to `write()`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz':
open_func: Callable = gzip.open
else:
open_func = open
with io.BufferedWriter(open_func(path, mode='wb')) as stream: with tmpfile(path) as base_stream:
write(patterns, stream, *args, **kwargs) streams: tuple[Any, ...] = (base_stream,)
if path.suffix == '.gz':
stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
streams = (stream,) + streams
else:
stream = base_stream
try:
write(library, stream, *args, **kwargs)
finally:
for ss in streams:
ss.close()
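# Example (illustrative): write a library to a gzipped GDSII file using
# 1 nm database units:
#
#   from masque.file import gdsii
#   gdsii.writefile(lib, 'layout.gds.gz', meters_per_unit=1e-9)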
def readfile( def readfile(
filename: Union[str, pathlib.Path], filename: str | pathlib.Path,
*args, *args,
**kwargs, **kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> tuple[Library, dict[str, Any]]:
""" """
Wrapper for `read()` that takes a filename or path instead of a stream. Wrapper for `read()` that takes a filename or path instead of a stream.
@ -186,19 +177,20 @@ def readfile(
else: else:
open_func = open open_func = open
with io.BufferedReader(open_func(path, mode='rb')) as stream: with open_func(path, mode='rb') as stream:
results = read(stream, *args, **kwargs) results = read(stream, *args, **kwargs)
return results return results
def read( def read(
stream: BinaryIO, stream: IO[bytes],
raw_mode: bool = True, raw_mode: bool = True,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> tuple[Library, dict[str, Any]]:
""" """
# TODO check GDSII file for cycles!
Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are
translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs
are translated into SubPattern objects. are translated into Ref objects.
Additional library info is returned in a dict, containing: Additional library info is returned in a dict, containing:
'name': name of the library 'name': name of the library
@ -211,31 +203,23 @@ def read(
raw_mode: If True, constructs shapes in raw mode, bypassing most data validation, Default True. raw_mode: If True, constructs shapes in raw mode, bypassing most data validation, Default True.
Returns: Returns:
- Dict of pattern_name:Patterns generated from GDSII structures - dict of pattern_name:Patterns generated from GDSII structures
- Dict of GDSII library info - dict of GDSII library info
""" """
library_info = _read_header(stream) library_info = _read_header(stream)
patterns = [] mlib = Library()
found_struct = records.BGNSTR.skip_past(stream) found_struct = records.BGNSTR.skip_past(stream)
while found_struct: while found_struct:
name = records.STRNAME.skip_and_read(stream) name = records.STRNAME.skip_and_read(stream)
pat = read_elements(stream, name=name.decode('ASCII'), raw_mode=raw_mode) pat = read_elements(stream, raw_mode=raw_mode)
patterns.append(pat) mlib[name.decode('ASCII')] = pat
found_struct = records.BGNSTR.skip_past(stream) found_struct = records.BGNSTR.skip_past(stream)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries return mlib, library_info
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0]]
del sp.identifier
return patterns_dict, library_info
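A matching read-side sketch (file and cell names are illustrative):
from masque.file.gdsii import readfile         # module path assumed, as above
library, info = readfile('layout.gds')         # returns (Library, dict)
print(info['name'])                            # library name, per the docstring above
top = library['TOP']                           # 'TOP' is a hypothetical cell name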
def _read_header(stream: BinaryIO) -> Dict[str, Any]: def _read_header(stream: IO[bytes]) -> dict[str, Any]:
""" """
Read the file header and create the library_info dict. Read the file header and create the library_info dict.
""" """
@ -249,8 +233,7 @@ def _read_header(stream: BinaryIO) -> Dict[str, Any]:
def read_elements( def read_elements(
stream: BinaryIO, stream: IO[bytes],
name: str,
raw_mode: bool = True, raw_mode: bool = True,
) -> Pattern: ) -> Pattern:
""" """
@ -265,7 +248,7 @@ def read_elements(
Returns: Returns:
A pattern containing the elements that were read. A pattern containing the elements that were read.
""" """
pat = Pattern(name) pat = Pattern()
elements = klamath.library.read_elements(stream) elements = klamath.library.read_elements(stream)
for element in elements: for element in elements:
@ -276,17 +259,19 @@ def read_elements(
path = _gpath_to_mpath(element, raw_mode) path = _gpath_to_mpath(element, raw_mode)
pat.shapes.append(path) pat.shapes.append(path)
elif isinstance(element, klamath.elements.Text): elif isinstance(element, klamath.elements.Text):
label = Label(offset=element.xy.astype(float), label = Label(
offset=element.xy.astype(float),
layer=element.layer, layer=element.layer,
string=element.string.decode('ASCII'), string=element.string.decode('ASCII'),
annotations=_properties_to_annotations(element.properties)) annotations=_properties_to_annotations(element.properties),
)
pat.labels.append(label) pat.labels.append(label)
elif isinstance(element, klamath.elements.Reference): elif isinstance(element, klamath.elements.Reference):
pat.subpatterns.append(_ref_to_subpat(element)) pat.refs.append(_gref_to_mref(element))
return pat return pat
def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]: def _mlayer2gds(mlayer: layer_t) -> tuple[int, int]:
""" Helper to turn a layer tuple-or-int into a layer and datatype""" """ Helper to turn a layer tuple-or-int into a layer and datatype"""
if isinstance(mlayer, int): if isinstance(mlayer, int):
layer = mlayer layer = mlayer
@ -302,10 +287,9 @@ def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
return layer, data_type return layer, data_type
def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern: def _gref_to_mref(ref: klamath.library.Reference) -> Ref:
""" """
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None Helper function to create a Ref from an SREF or AREF. Sets ref.target to struct_name.
and sets the instance .identifier to (struct_name,).
""" """
xy = ref.xy.astype(float) xy = ref.xy.astype(float)
offset = xy[0] offset = xy[0]
@ -317,15 +301,16 @@ def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern:
repetition = Grid(a_vector=a_vector, b_vector=b_vector, repetition = Grid(a_vector=a_vector, b_vector=b_vector,
a_count=a_count, b_count=b_count) a_count=a_count, b_count=b_count)
subpat = SubPattern(pattern=None, mref = Ref(
target=ref.struct_name.decode('ASCII'),
offset=offset, offset=offset,
rotation=numpy.deg2rad(ref.angle_deg), rotation=numpy.deg2rad(ref.angle_deg),
scale=ref.mag, scale=ref.mag,
mirrored=(ref.invert_y, False), mirrored=(ref.invert_y, False),
annotations=_properties_to_annotations(ref.properties), annotations=_properties_to_annotations(ref.properties),
repetition=repetition) repetition=repetition,
subpat.identifier = (ref.struct_name.decode('ASCII'),) )
return subpat return mref
def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path: def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path:
@ -334,7 +319,8 @@ def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path:
else: else:
raise PatternError(f'Unrecognized path type: {gpath.path_type}') raise PatternError(f'Unrecognized path type: {gpath.path_type}')
mpath = Path(vertices=gpath.xy.astype(float), mpath = Path(
vertices=gpath.xy.astype(float),
layer=gpath.layer, layer=gpath.layer,
width=gpath.width, width=gpath.width,
cap=cap, cap=cap,
@ -348,7 +334,8 @@ def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path:
def _boundary_to_polygon(boundary: klamath.library.Boundary, raw_mode: bool) -> Polygon: def _boundary_to_polygon(boundary: klamath.library.Boundary, raw_mode: bool) -> Polygon:
return Polygon(vertices=boundary.xy[:-1].astype(float), return Polygon(
vertices=boundary.xy[:-1].astype(float),
layer=boundary.layer, layer=boundary.layer,
offset=numpy.zeros(2), offset=numpy.zeros(2),
annotations=_properties_to_annotations(boundary.properties), annotations=_properties_to_annotations(boundary.properties),
@ -356,62 +343,69 @@ def _boundary_to_polygon(boundary: klamath.library.Boundary, raw_mode: bool) ->
) )
def _subpatterns_to_refs(subpatterns: List[SubPattern]) -> List[klamath.library.Reference]: def _mrefs_to_grefs(refs: list[Ref]) -> list[klamath.library.Reference]:
refs = [] grefs = []
for subpat in subpatterns: for ref in refs:
if subpat.pattern is None: if ref.target is None:
continue continue
encoded_name = subpat.pattern.name.encode('ASCII') encoded_name = ref.target.encode('ASCII')
# Note: GDS mirrors first and rotates second # Note: GDS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) mirror_across_x, extra_angle = normalize_mirror(ref.mirrored)
rep = subpat.repetition rep = ref.repetition
angle_deg = numpy.rad2deg(subpat.rotation + extra_angle) % 360 angle_deg = numpy.rad2deg(ref.rotation + extra_angle) % 360
properties = _annotations_to_properties(subpat.annotations, 512) properties = _annotations_to_properties(ref.annotations, 512)
if isinstance(rep, Grid): if isinstance(rep, Grid):
b_vector = rep.b_vector if rep.b_vector is not None else numpy.zeros(2) b_vector = rep.b_vector if rep.b_vector is not None else numpy.zeros(2)
b_count = rep.b_count if rep.b_count is not None else 1 b_count = rep.b_count if rep.b_count is not None else 1
xy: NDArray[numpy.float64] = numpy.array(subpat.offset) + [ xy = numpy.array(ref.offset) + numpy.array([
[0, 0], [0.0, 0.0],
rep.a_vector * rep.a_count, rep.a_vector * rep.a_count,
b_vector * b_count, b_vector * b_count,
] ])
aref = klamath.library.Reference(struct_name=encoded_name, aref = klamath.library.Reference(
xy=numpy.round(xy).astype(int), struct_name=encoded_name,
colrow=(numpy.round(rep.a_count), numpy.round(rep.b_count)), xy=rint_cast(xy),
colrow=(numpy.rint(rep.a_count), numpy.rint(rep.b_count)),
angle_deg=angle_deg, angle_deg=angle_deg,
invert_y=mirror_across_x, invert_y=mirror_across_x,
mag=subpat.scale, mag=ref.scale,
properties=properties) properties=properties,
refs.append(aref) )
grefs.append(aref)
elif rep is None: elif rep is None:
ref = klamath.library.Reference(struct_name=encoded_name, sref = klamath.library.Reference(
xy=numpy.round([subpat.offset]).astype(int), struct_name=encoded_name,
xy=rint_cast([ref.offset]),
colrow=None, colrow=None,
angle_deg=angle_deg, angle_deg=angle_deg,
invert_y=mirror_across_x, invert_y=mirror_across_x,
mag=subpat.scale, mag=ref.scale,
properties=properties) properties=properties,
refs.append(ref) )
grefs.append(sref)
else: else:
new_srefs = [klamath.library.Reference(struct_name=encoded_name, new_srefs = [
xy=numpy.round([subpat.offset + dd]).astype(int), klamath.library.Reference(
struct_name=encoded_name,
xy=rint_cast([ref.offset + dd]),
colrow=None, colrow=None,
angle_deg=angle_deg, angle_deg=angle_deg,
invert_y=mirror_across_x, invert_y=mirror_across_x,
mag=subpat.scale, mag=ref.scale,
properties=properties) properties=properties,
)
for dd in rep.displacements] for dd in rep.displacements]
refs += new_srefs grefs += new_srefs
return refs return grefs
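The "GDS mirrors first and rotates second" note above depends on folding a two-axis mirror into a single mirror-across-x flag plus an extra rotation. A self-contained sketch of that equivalence (a hypothetical helper, not masque's actual normalize_mirror):
import numpy
def fold_mirror(mirror_across_x: bool, mirror_across_y: bool) -> tuple[bool, float]:
    # Mirroring across the y axis (x -> -x) equals mirroring across the x axis
    # (y -> -y) followed by a 180 degree rotation, so a single mirror flag plus
    # an extra angle describes any combination of the two.
    return (mirror_across_x != mirror_across_y), (numpy.pi if mirror_across_y else 0.0)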
def _properties_to_annotations(properties: Dict[int, bytes]) -> annotations_t: def _properties_to_annotations(properties: dict[int, bytes]) -> annotations_t:
return {str(k): [v.decode()] for k, v in properties.items()} return {str(k): [v.decode()] for k, v in properties.items()}
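An illustrative call of the comprehension above (property key and value are made up):
_properties_to_annotations({2: b'chip_id_7'})   # -> {'2': ['chip_id_7']}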
def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> Dict[int, bytes]: def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> dict[int, bytes]:
cum_len = 0 cum_len = 0
props = {} props = {}
for key, vals in annotations.items(): for key, vals in annotations.items():
@ -434,60 +428,71 @@ def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -
def _shapes_to_elements( def _shapes_to_elements(
shapes: List[Shape], shapes: list[Shape],
polygonize_paths: bool = False, polygonize_paths: bool = False,
) -> List[klamath.elements.Element]: ) -> list[klamath.elements.Element]:
elements: List[klamath.elements.Element] = [] elements: list[klamath.elements.Element] = []
# Add a Boundary element for each shape, and Path elements if necessary # Add a Boundary element for each shape, and Path elements if necessary
for shape in shapes: for shape in shapes:
if shape.repetition is not None:
raise PatternError('Shape repetitions are not supported by GDS.'
' Please call library.wrap_repeated_shapes() before writing to file.')
layer, data_type = _mlayer2gds(shape.layer) layer, data_type = _mlayer2gds(shape.layer)
properties = _annotations_to_properties(shape.annotations, 128) properties = _annotations_to_properties(shape.annotations, 128)
if isinstance(shape, Path) and not polygonize_paths: if isinstance(shape, Path) and not polygonize_paths:
xy = numpy.round(shape.vertices + shape.offset).astype(int) xy = rint_cast(shape.vertices + shape.offset)
width = numpy.round(shape.width).astype(int) width = rint_cast(shape.width)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
extension: Tuple[int, int] extension: tuple[int, int]
if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None: if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None:
extension = tuple(shape.cap_extensions) # type: ignore extension = tuple(shape.cap_extensions) # type: ignore
else: else:
extension = (0, 0) extension = (0, 0)
path = klamath.elements.Path(layer=(layer, data_type), path = klamath.elements.Path(
layer=(layer, data_type),
xy=xy, xy=xy,
path_type=path_type, path_type=path_type,
width=width, width=int(width),
extension=extension, extension=extension,
properties=properties) properties=properties,
)
elements.append(path) elements.append(path)
elif isinstance(shape, Polygon): elif isinstance(shape, Polygon):
polygon = shape polygon = shape
xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32) xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32)
numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe') numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe')
xy_closed[-1] = xy_closed[0] xy_closed[-1] = xy_closed[0]
boundary = klamath.elements.Boundary(layer=(layer, data_type), boundary = klamath.elements.Boundary(
layer=(layer, data_type),
xy=xy_closed, xy=xy_closed,
properties=properties) properties=properties,
)
elements.append(boundary) elements.append(boundary)
else: else:
for polygon in shape.to_polygons(): for polygon in shape.to_polygons():
xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32) xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32)
numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe') numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe')
xy_closed[-1] = xy_closed[0] xy_closed[-1] = xy_closed[0]
boundary = klamath.elements.Boundary(layer=(layer, data_type), boundary = klamath.elements.Boundary(
layer=(layer, data_type),
xy=xy_closed, xy=xy_closed,
properties=properties) properties=properties,
)
elements.append(boundary) elements.append(boundary)
return elements return elements
def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]: def _labels_to_texts(labels: list[Label]) -> list[klamath.elements.Text]:
texts = [] texts = []
for label in labels: for label in labels:
properties = _annotations_to_properties(label.annotations, 128) properties = _annotations_to_properties(label.annotations, 128)
layer, text_type = _mlayer2gds(label.layer) layer, text_type = _mlayer2gds(label.layer)
xy = numpy.round([label.offset]).astype(int) xy = rint_cast([label.offset])
text = klamath.elements.Text(layer=(layer, text_type), text = klamath.elements.Text(
layer=(layer, text_type),
xy=xy, xy=xy,
string=label.string.encode('ASCII'), string=label.string.encode('ASCII'),
properties=properties, properties=properties,
@ -496,76 +501,18 @@ def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]:
invert_y=False, invert_y=False,
width=0, width=0,
path_type=0, path_type=0,
mag=1) mag=1,
)
texts.append(text) texts.append(text)
return texts return texts
def disambiguate_pattern_names(
patterns: Sequence[Pattern],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Optional[Callable[[str], bool]] = None,
) -> None:
"""
Args:
patterns: List of patterns to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
used_names = []
for pat in set(patterns):
# Shorten names which already exceed max-length
if len(pat.name) > max_name_length:
shortened_name = pat.name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
+ f' shortening to "{shortened_name}" before generating suffix')
else:
shortened_name = pat.name
# Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
# Add a suffix that makes the name unique
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
i += 1
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')
if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
if len(encoded_name) > max_name_length:
raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)
def load_library( def load_library(
stream: BinaryIO, stream: IO[bytes],
tag: str,
is_secondary: Optional[Callable[[str], bool]] = None,
*, *,
full_load: bool = False, full_load: bool = False,
) -> Tuple[Library, Dict[str, Any]]: postprocess: Callable[[ILibraryView, str, Pattern], Pattern] | None = None
) -> tuple[LazyLibrary, dict[str, Any]]:
""" """
Scan a GDSII stream to determine what structures are present, and create Scan a GDSII stream to determine what structures are present, and create
a library from them. This enables deferred reading of structures a library from them. This enables deferred reading of structures
@ -577,33 +524,27 @@ def load_library(
The caller should leave the stream open while the library The caller should leave the stream open while the library
is still in use, since the library will need to access it is still in use, since the library will need to access it
in order to read the structure contents. in order to read the structure contents.
tag: Unique identifier that will be used to identify this data source
is_secondary: Function which takes a structure name and returns
True if the structure should only be used as a subcell
and not appear in the main Library interface.
Default always returns False.
full_load: If True, force all structures to be read immediately rather full_load: If True, force all structures to be read immediately rather
than as-needed. Since data is read sequentially from the file, than as-needed. Since data is read sequentially from the file, this
this will be faster than using the resulting library's will be faster than using the resulting library's `precache` method.
`precache` method. postprocess: If given, this function is used to post-process each
pattern *upon first load only*.
Returns: Returns:
Library object, allowing for deferred load of structures. LazyLibrary object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`). Additional library info (dict, same format as from `read`).
""" """
if is_secondary is None:
def is_secondary(k: str) -> bool:
return False
assert(is_secondary is not None)
stream.seek(0) stream.seek(0)
lib = Library() lib = LazyLibrary()
if full_load: if full_load:
# Full load approach (immediately load everything) # Full load approach (immediately load everything)
patterns, library_info = read(stream) patterns, library_info = read(stream)
for name, pattern in patterns.items(): for name, pattern in patterns.items():
lib.set_const(name, tag, pattern, secondary=is_secondary(name)) if postprocess is not None:
lib[name] = postprocess(lib, name, pattern)
else:
lib[name] = pattern
return lib, library_info return lib, library_info
# Normal approach (scan and defer load) # Normal approach (scan and defer load)
@ -615,21 +556,23 @@ def load_library(
def mkstruct(pos: int = pos, name: str = name) -> Pattern: def mkstruct(pos: int = pos, name: str = name) -> Pattern:
stream.seek(pos) stream.seek(pos)
return read_elements(stream, name, raw_mode=True) pat = read_elements(stream, raw_mode=True)
if postprocess is not None:
pat = postprocess(lib, name, pat)
return pat
lib.set_value(name, tag, mkstruct, secondary=is_secondary(name)) lib[name] = mkstruct
return lib, library_info return lib, library_info
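A brief sketch of the new postprocess hook (the function below is illustrative); on the deferred path it runs once per structure, at first load:
def drop_labels(lib, name, pat):
    # Hypothetical post-processing step: discard text labels when a cell is first loaded.
    pat.labels.clear()
    return pat
lazy_lib, info = load_library(stream, postprocess=drop_labels)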
def load_libraryfile( def load_libraryfile(
filename: Union[str, pathlib.Path], filename: str | pathlib.Path,
tag: str,
is_secondary: Optional[Callable[[str], bool]] = None,
*, *,
use_mmap: bool = True, use_mmap: bool = True,
full_load: bool = False, full_load: bool = False,
) -> Tuple[Library, Dict[str, Any]]: postprocess: Callable[[ILibraryView, str, Pattern], Pattern] | None = None
) -> tuple[LazyLibrary, dict[str, Any]]:
""" """
Wrapper for `load_library()` that takes a filename or path instead of a stream. Wrapper for `load_library()` that takes a filename or path instead of a stream.
@ -640,31 +583,65 @@ def load_libraryfile(
Args: Args:
path: filename or path to read from path: filename or path to read from
tag: Unique identifier for library, see `load_library`
is_secondary: Function specifying subcells, see `load_library`
use_mmap: If `True`, will attempt to memory-map the file instead use_mmap: If `True`, will attempt to memory-map the file instead
of buffering. In the case of gzipped files, the file of buffering. In the case of gzipped files, the file
is decompressed into a python `bytes` object in memory is decompressed into a python `bytes` object in memory
and reopened as an `io.BytesIO` stream. and reopened as an `io.BytesIO` stream.
full_load: If `True`, immediately loads all data. See `load_library`. full_load: If `True`, immediately loads all data. See `load_library`.
postprocess: Passed to `load_library`
Returns: Returns:
Library object, allowing for deferred load of structures. LazyLibrary object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`). Additional library info (dict, same format as from `read`).
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
stream: IO[bytes]
if is_gzipped(path): if is_gzipped(path):
if use_mmap: if use_mmap:
logger.info('Asked to mmap a gzipped file, reading into memory instead...') logger.info('Asked to mmap a gzipped file, reading into memory instead...')
base_stream = gzip.open(path, mode='rb') gz_stream = gzip.open(path, mode='rb')
stream = io.BytesIO(base_stream.read()) stream = io.BytesIO(gz_stream.read()) # type: ignore
else: else:
base_stream = gzip.open(path, mode='rb') gz_stream = gzip.open(path, mode='rb')
stream = io.BufferedReader(base_stream) stream = io.BufferedReader(gz_stream) # type: ignore
else: else:
base_stream = open(path, mode='rb')
if use_mmap: if use_mmap:
stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ) base_stream = open(path, mode='rb', buffering=0)
stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ) # type: ignore
else: else:
stream = io.BufferedReader(base_stream) stream = open(path, mode='rb')
return load_library(stream, tag, is_secondary) return load_library(stream, full_load=full_load, postprocess=postprocess)
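Usage sketch for the file-based wrapper (file and cell names are illustrative):
lazy_lib, info = load_libraryfile('big_layout.gds')   # scans structure positions only
top = lazy_lib['TOP']                                  # parsed from the stream on first access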
def check_valid_names(
names: Iterable[str],
max_length: int = 32,
) -> None:
"""
Check all provided names to see if they're valid GDSII cell names.
Args:
names: Collection of names to check
max_length: Max allowed length
"""
allowed_chars = set(string.ascii_letters + string.digits + '_?$')
bad_chars = [
name for name in names
if not set(name).issubset(allowed_chars)
]
bad_lengths = [
name for name in names
if len(name) > max_length
]
if bad_chars:
logger.error('Names contain invalid characters:\n' + pformat(bad_chars))
if bad_lengths:
logger.error(f'Names too long (>{max_length}):\n' + pformat(bad_lengths))
if bad_chars or bad_lengths:
raise LibraryError('Library contains invalid names, see log above')
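Illustrative calls (names are made up):
check_valid_names(['TOP', 'cell_1$A'])   # ok: only A-Za-z0-9_?$ used, length <= 32
check_valid_names(['bad name!'])         # logs the offending name and raises LibraryError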


@ -1,2 +0,0 @@
# For backwards compatibility
from .gdsii import *


@ -10,24 +10,26 @@ Note that OASIS references follow the same convention as `masque`,
Scaling, rotation, and mirroring apply to individual instances, not grid Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets. vectors or offsets.
Notes:
* Gzip modification time is set to 0 (start of current epoch, usually 1970-01-01)
""" """
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional from typing import Any, Callable, Iterable, IO, Mapping, cast, Sequence
import re
import io
import copy
import base64
import struct
import logging import logging
import pathlib import pathlib
import gzip import gzip
import string
from pprint import pformat
import numpy import numpy
from numpy.typing import ArrayLike, NDArray
import fatamorgana import fatamorgana
import fatamorgana.records as fatrec import fatamorgana.records as fatrec
from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference
from .utils import clean_pattern_vertices, is_gzipped from .utils import is_gzipped, tmpfile
from .. import Pattern, SubPattern, PatternError, Label, Shape from .. import Pattern, Ref, PatternError, LibraryError, Label, Shape
from ..library import Library, ILibrary
from ..shapes import Polygon, Path, Circle from ..shapes import Polygon, Path, Circle
from ..repetition import Grid, Arbitrary, Repetition from ..repetition import Grid, Arbitrary, Repetition
from ..utils import layer_t, normalize_mirror, annotations_t from ..utils import layer_t, normalize_mirror, annotations_t
@ -36,7 +38,7 @@ from ..utils import layer_t, normalize_mirror, annotations_t
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.warning('OASIS support is experimental and mostly untested!') logger.warning('OASIS support is experimental!')
path_cap_map = { path_cap_map = {
@ -45,21 +47,23 @@ path_cap_map = {
PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom, PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom,
} }
#TODO implement more shape types? #TODO implement more shape types in OASIS?
def rint_cast(val: ArrayLike) -> NDArray[numpy.int64]:
return numpy.rint(val).astype(numpy.int64)
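For example, rint_cast rounds to the nearest integer (ties to even, per numpy.rint) before casting:
rint_cast([1.4, 2.5, 3.6])   # -> array([1, 2, 4]) with dtype int64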
def build( def build(
patterns: Union[Pattern, Sequence[Pattern]], library: Mapping[str, Pattern], # NOTE: Pattern here should be treated as immutable!
units_per_micron: int, units_per_micron: int,
layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None, layer_map: dict[str, int | tuple[int, int]] | None = None,
*, *,
modify_originals: bool = False, annotations: annotations_t | None = None,
disambiguate_func: Optional[Callable[[Iterable[Pattern]], None]] = None,
annotations: Optional[annotations_t] = None,
) -> fatamorgana.OasisLayout: ) -> fatamorgana.OasisLayout:
""" """
Convert a `Pattern` or list of patterns to an OASIS stream, writing patterns Convert a collection of {name: Pattern} pairs to an OASIS stream, writing patterns
as OASIS cells, subpatterns as Placement records, and other shapes and labels as OASIS cells, refs as Placement records, and mapping other shapes and labels
mapped to equivalent record types (Polygon, Path, Circle, Text). to equivalent record types (Polygon, Path, Circle, Text).
Other shape types may be converted to polygons if no equivalent Other shape types may be converted to polygons if no equivalent
record type exists (or is not implemented here yet). record type exists (or is not implemented here yet).
@ -71,14 +75,17 @@ def build(
If a layer map is provided, layer strings will be converted If a layer map is provided, layer strings will be converted
automatically, and layer names will be written to the file. automatically, and layer names will be written to the file.
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` Other functions you may want to call:
prior to calling this function. - `masque.file.oasis.check_valid_names(library.keys())` to check for invalid names
- `library.dangling_refs()` to check for references to missing patterns
- `pattern.polygonize()` for any patterns with shapes other
than `masque.shapes.Polygon`, `masque.shapes.Path`, or `masque.shapes.Circle`
Args: Args:
patterns: A Pattern or list of patterns to convert. library: A {name: Pattern} mapping of patterns to write.
units_per_micron: Written into the OASIS file, number of grid steps per micrometer. units_per_micron: Written into the OASIS file, number of grid steps per micrometer.
All distances are assumed to be an integer multiple of the grid step, and are stored as such. All distances are assumed to be an integer multiple of the grid step, and are stored as such.
layer_map: Dictionary which translates layer names into layer numbers. If this argument is layer_map: dictionary which translates layer names into layer numbers. If this argument is
provided, input shapes and labels are allowed to have layer names instead of numbers. provided, input shapes and labels are allowed to have layer names instead of numbers.
It is assumed that geometry and text share the same layer names, and each name is It is assumed that geometry and text share the same layer names, and each name is
assigned only to a single layer (not a range). assigned only to a single layer (not a range).
@ -86,31 +93,23 @@ def build(
into numbers, omit this argument, and manually generate the required into numbers, omit this argument, and manually generate the required
`fatamorgana.records.LayerName` entries. `fatamorgana.records.LayerName` entries.
Default is an empty dict (no names provided). Default is an empty dict (no names provided).
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`.
annotations: dictionary of key-value pairs which are saved as library-level properties annotations: dictionary of key-value pairs which are saved as library-level properties
Returns: Returns:
`fatamorgana.OasisLayout` `fatamorgana.OasisLayout`
""" """
if isinstance(patterns, Pattern): if not isinstance(library, ILibrary):
patterns = [patterns] if isinstance(library, dict):
library = Library(library)
else:
library = Library(dict(library))
if layer_map is None: if layer_map is None:
layer_map = {} layer_map = {}
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names
if annotations is None: if annotations is None:
annotations = {} annotations = {}
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
# Create library # Create library
lib = fatamorgana.OasisLayout(unit=units_per_micron, validation=None) lib = fatamorgana.OasisLayout(unit=units_per_micron, validation=None)
lib.properties = annotations_to_properties(annotations) lib.properties = annotations_to_properties(annotations)
@ -119,44 +118,38 @@ def build(
for name, layer_num in layer_map.items(): for name, layer_num in layer_map.items():
layer, data_type = _mlayer2oas(layer_num) layer, data_type = _mlayer2oas(layer_num)
lib.layers += [ lib.layers += [
fatrec.LayerName(nstring=name, fatrec.LayerName(
nstring=name,
layer_interval=(layer, layer), layer_interval=(layer, layer),
type_interval=(data_type, data_type), type_interval=(data_type, data_type),
is_textlayer=tt) is_textlayer=tt,
)
for tt in (True, False)] for tt in (True, False)]
def layer2oas(mlayer: layer_t) -> Tuple[int, int]: def layer2oas(mlayer: layer_t) -> tuple[int, int]:
assert(layer_map is not None) assert layer_map is not None
layer_num = layer_map[mlayer] if isinstance(mlayer, str) else mlayer layer_num = layer_map[mlayer] if isinstance(mlayer, str) else mlayer
return _mlayer2oas(layer_num) return _mlayer2oas(layer_num)
else: else:
layer2oas = _mlayer2oas layer2oas = _mlayer2oas
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
# Now create a structure for each pattern # Now create a structure for each pattern
for pat in patterns_by_id.values(): for name, pat in library.items():
structure = fatamorgana.Cell(name=pat.name) structure = fatamorgana.Cell(name=name)
lib.cells.append(structure) lib.cells.append(structure)
structure.properties += annotations_to_properties(pat.annotations) structure.properties += annotations_to_properties(pat.annotations)
structure.geometry += _shapes_to_elements(pat.shapes, layer2oas) structure.geometry += _shapes_to_elements(pat.shapes, layer2oas)
structure.geometry += _labels_to_texts(pat.labels, layer2oas) structure.geometry += _labels_to_texts(pat.labels, layer2oas)
structure.placements += _subpatterns_to_placements(pat.subpatterns) structure.placements += _refs_to_placements(pat.refs)
return lib return lib
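A minimal usage sketch of the mapping-based build() (layer name, unit value, and file name are illustrative):
layout = build(library, units_per_micron=1000, layer_map={'metal1': (1, 0)})
with open('out.oas', 'wb') as ff:
    layout.write(ff)          # equivalent to the write() wrapper below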
def write( def write(
patterns: Union[Sequence[Pattern], Pattern], library: Mapping[str, Pattern], # NOTE: Pattern here should be treated as immutable!
stream: io.BufferedIOBase, stream: IO[bytes],
*args, *args,
**kwargs, **kwargs,
) -> None: ) -> None:
@ -165,18 +158,18 @@ def write(
for details. for details.
Args: Args:
patterns: A Pattern or list of patterns to write to file. library: A {name: Pattern} mapping of patterns to write.
stream: Stream to write to. stream: Stream to write to.
*args: passed to `oasis.build()` *args: passed to `oasis.build()`
**kwargs: passed to `oasis.build()` **kwargs: passed to `oasis.build()`
""" """
lib = build(patterns, *args, **kwargs) lib = build(library, *args, **kwargs)
lib.write(stream) lib.write(stream)
def writefile( def writefile(
patterns: Union[Sequence[Pattern], Pattern], library: Mapping[str, Pattern], # NOTE: Pattern here should be treated as immutable!
filename: Union[str, pathlib.Path], filename: str | pathlib.Path,
*args, *args,
**kwargs, **kwargs,
) -> None: ) -> None:
@ -186,26 +179,33 @@ def writefile(
Will automatically compress the file if it has a .gz suffix. Will automatically compress the file if it has a .gz suffix.
Args: Args:
patterns: `Pattern` or list of patterns to save library: A {name: Pattern} mapping of patterns to write.
filename: Filename to save to. filename: Filename to save to.
*args: passed to `oasis.write` *args: passed to `oasis.write`
**kwargs: passed to `oasis.write` **kwargs: passed to `oasis.write`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz':
open_func: Callable = gzip.open
else:
open_func = open
with io.BufferedWriter(open_func(path, mode='wb')) as stream: with tmpfile(path) as base_stream:
write(patterns, stream, *args, **kwargs) streams: tuple[Any, ...] = (base_stream,)
if path.suffix == '.gz':
stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
streams += (stream,)
else:
stream = base_stream
try:
write(library, stream, *args, **kwargs)
finally:
for ss in streams:
ss.close()
def readfile( def readfile(
filename: Union[str, pathlib.Path], filename: str | pathlib.Path,
*args, *args,
**kwargs, **kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> tuple[Library, dict[str, Any]]:
""" """
Wrapper for `oasis.read()` that takes a filename or path instead of a stream. Wrapper for `oasis.read()` that takes a filename or path instead of a stream.
@ -222,19 +222,18 @@ def readfile(
else: else:
open_func = open open_func = open
with io.BufferedReader(open_func(path, mode='rb')) as stream: with open_func(path, mode='rb') as stream:
results = read(stream, *args, **kwargs) results = read(stream, *args, **kwargs)
return results return results
def read( def read(
stream: io.BufferedIOBase, stream: IO[bytes],
clean_vertices: bool = True, ) -> tuple[Library, dict[str, Any]]:
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
""" """
Read an OASIS file and translate it into a dict of Pattern objects. OASIS cells are Read an OASIS file and translate it into a dict of Pattern objects. OASIS cells are
translated into Pattern objects; Polygons are translated into polygons, and Placements translated into Pattern objects; Polygons are translated into polygons, and Placements
are translated into SubPattern objects. are translated into Ref objects.
Additional library info is returned in a dict, containing: Additional library info is returned in a dict, containing:
'units_per_micrometer': number of database units per micrometer (all values are in database units) 'units_per_micrometer': number of database units per micrometer (all values are in database units)
@ -243,18 +242,15 @@ def read(
Args: Args:
stream: Stream to read from. stream: Stream to read from.
clean_vertices: If `True`, remove any redundant vertices when loading polygons.
The cleaning process removes any polygons with zero area or <3 vertices.
Default `True`.
Returns: Returns:
- Dict of `pattern_name`:`Pattern`s generated from OASIS cells - dict of `pattern_name`:`Pattern`s generated from OASIS cells
- Dict of OASIS library info - dict of OASIS library info
""" """
lib = fatamorgana.OasisLayout.read(stream) lib = fatamorgana.OasisLayout.read(stream)
library_info: Dict[str, Any] = { library_info: dict[str, Any] = {
'units_per_micrometer': lib.unit, 'units_per_micrometer': lib.unit,
'annotations': properties_to_annotations(lib.properties, lib.propnames, lib.propstrings), 'annotations': properties_to_annotations(lib.properties, lib.propnames, lib.propstrings),
} }
@ -264,32 +260,37 @@ def read(
layer_map[str(layer_name.nstring)] = layer_name layer_map[str(layer_name.nstring)] = layer_name
library_info['layer_map'] = layer_map library_info['layer_map'] = layer_map
patterns = [] mlib = Library()
for cell in lib.cells: for cell in lib.cells:
if isinstance(cell.name, int): if isinstance(cell.name, int):
cell_name = lib.cellnames[cell.name].nstring.string cell_name = lib.cellnames[cell.name].nstring.string
else: else:
cell_name = cell.name.string cell_name = cell.name.string
pat = Pattern(name=cell_name) pat = Pattern()
for element in cell.geometry: for element in cell.geometry:
if isinstance(element, fatrec.XElement): if isinstance(element, fatrec.XElement):
logger.warning('Skipping XElement record') logger.warning('Skipping XElement record')
# note XELEMENT has no repetition # note XELEMENT has no repetition
continue continue
assert(not isinstance(element.repetition, fatamorgana.ReuseRepetition)) assert not isinstance(element.repetition, fatamorgana.ReuseRepetition)
repetition = repetition_fata2masq(element.repetition) repetition = repetition_fata2masq(element.repetition)
# Switch based on element type: # Switch based on element type:
if isinstance(element, fatrec.Polygon): if isinstance(element, fatrec.Polygon):
vertices = numpy.cumsum(numpy.vstack(((0, 0), element.get_point_list())), axis=0) # Drop last point (`fatamorgana` returns an explicitly closed list; we use implicit close)
# also need `cumsum` to convert from deltas to locations
vertices = numpy.cumsum(numpy.vstack(((0, 0), element.get_point_list()[:-1])), axis=0)
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
poly = Polygon(vertices=vertices, poly = Polygon(
vertices=vertices,
layer=element.get_layer_tuple(), layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
annotations=annotations, annotations=annotations,
repetition=repetition) repetition=repetition,
)
pat.shapes.append(poly) pat.shapes.append(poly)
@ -302,20 +303,24 @@ def read(
raise Exception('masque does not support multiple cap types on a single path.') # TODO handle multiple cap types raise Exception('masque does not support multiple cap types on a single path.') # TODO handle multiple cap types
cap = cap_start cap = cap_start
path_args: Dict[str, Any] = {} path_args: dict[str, Any] = {}
if cap == Path.Cap.SquareCustom: if cap == Path.Cap.SquareCustom:
path_args['cap_extensions'] = numpy.array((element.get_extension_start()[1], path_args['cap_extensions'] = numpy.array((
element.get_extension_end()[1])) element.get_extension_start()[1],
element.get_extension_end()[1],
))
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
path = Path(vertices=vertices, path = Path(
vertices=vertices,
layer=element.get_layer_tuple(), layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
repetition=repetition, repetition=repetition,
annotations=annotations, annotations=annotations,
width=element.get_half_width() * 2, width=element.get_half_width() * 2,
cap=cap, cap=cap,
**path_args) **path_args,
)
pat.shapes.append(path) pat.shapes.append(path)
@ -323,7 +328,8 @@ def read(
width = element.get_width() width = element.get_width()
height = element.get_height() height = element.get_height()
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
rect = Polygon(layer=element.get_layer_tuple(), rect = Polygon(
layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
repetition=repetition, repetition=repetition,
vertices=numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height), vertices=numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height),
@ -357,7 +363,8 @@ def read(
vertices[2, 0] -= b vertices[2, 0] -= b
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
trapz = Polygon(layer=element.get_layer_tuple(), trapz = Polygon(
layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
repetition=repetition, repetition=repetition,
vertices=vertices, vertices=vertices,
@ -412,7 +419,8 @@ def read(
vertices[0, 1] += width vertices[0, 1] += width
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
ctrapz = Polygon(layer=element.get_layer_tuple(), ctrapz = Polygon(
layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
repetition=repetition, repetition=repetition,
vertices=vertices, vertices=vertices,
@ -422,11 +430,13 @@ def read(
elif isinstance(element, fatrec.Circle): elif isinstance(element, fatrec.Circle):
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
circle = Circle(layer=element.get_layer_tuple(), circle = Circle(
layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
repetition=repetition, repetition=repetition,
annotations=annotations, annotations=annotations,
radius=float(element.get_radius())) radius=float(element.get_radius()),
)
pat.shapes.append(circle) pat.shapes.append(circle)
elif isinstance(element, fatrec.Text): elif isinstance(element, fatrec.Text):
@ -436,11 +446,13 @@ def read(
string = lib.textstrings[str_or_ref].string string = lib.textstrings[str_or_ref].string
else: else:
string = str_or_ref.string string = str_or_ref.string
label = Label(layer=element.get_layer_tuple(), label = Label(
layer=element.get_layer_tuple(),
offset=element.get_xy(), offset=element.get_xy(),
repetition=repetition, repetition=repetition,
annotations=annotations, annotations=annotations,
string=string) string=string,
)
pat.labels.append(label) pat.labels.append(label)
else: else:
@ -448,26 +460,14 @@ def read(
continue continue
for placement in cell.placements: for placement in cell.placements:
pat.subpatterns.append(_placement_to_subpat(placement, lib)) pat.refs.append(_placement_to_ref(placement, lib))
if clean_vertices: mlib[cell_name] = pat
clean_pattern_vertices(pat)
patterns.append(pat)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries return mlib, library_info
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
ident = sp.identifier[0]
name = ident if isinstance(ident, str) else lib.cellnames[ident].nstring.string
sp.pattern = patterns_dict[name]
del sp.identifier
return patterns_dict, library_info
def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]: def _mlayer2oas(mlayer: layer_t) -> tuple[int, int]:
""" Helper to turn a layer tuple-or-int into a layer and datatype""" """ Helper to turn a layer tuple-or-int into a layer and datatype"""
if isinstance(mlayer, int): if isinstance(mlayer, int):
layer = mlayer layer = mlayer
@ -479,97 +479,103 @@ def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
else: else:
data_type = 0 data_type = 0
else: else:
raise PatternError(f'Invalid layer for OASIS: {layer}. Note that OASIS layers cannot be ' raise PatternError(f'Invalid layer for OASIS: {mlayer}. Note that OASIS layers cannot be '
f'strings unless a layer map is provided.') f'strings unless a layer map is provided.')
return layer, data_type return layer, data_type
def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayout) -> SubPattern: def _placement_to_ref(placement: fatrec.Placement, lib: fatamorgana.OasisLayout) -> Ref:
""" """
Helper function to create a SubPattern from a placement. Sets subpat.pattern to None Helper function to create a Ref from a placement. Sets ref.target to the placement name.
and sets the instance .identifier to (struct_name,).
""" """
assert(not isinstance(placement.repetition, fatamorgana.ReuseRepetition)) assert not isinstance(placement.repetition, fatamorgana.ReuseRepetition)
xy = numpy.array((placement.x, placement.y)) xy = numpy.array((placement.x, placement.y))
mag = placement.magnification if placement.magnification is not None else 1 mag = placement.magnification if placement.magnification is not None else 1
pname = placement.get_name() pname = placement.get_name()
name = pname if isinstance(pname, int) else pname.string name: int | str = pname if isinstance(pname, int) else pname.string # TODO deal with referenced names
annotations = properties_to_annotations(placement.properties, lib.propnames, lib.propstrings) annotations = properties_to_annotations(placement.properties, lib.propnames, lib.propstrings)
if placement.angle is None: if placement.angle is None:
rotation = 0 rotation = 0
else: else:
rotation = numpy.deg2rad(float(placement.angle)) rotation = numpy.deg2rad(float(placement.angle))
subpat = SubPattern(offset=xy, ref = Ref(
pattern=None, target=name,
offset=xy,
mirrored=(placement.flip, False), mirrored=(placement.flip, False),
rotation=rotation, rotation=rotation,
scale=float(mag), scale=float(mag),
identifier=(name,),
repetition=repetition_fata2masq(placement.repetition), repetition=repetition_fata2masq(placement.repetition),
annotations=annotations) annotations=annotations,
return subpat )
return ref
def _subpatterns_to_placements( def _refs_to_placements(
subpatterns: List[SubPattern], refs: list[Ref],
) -> List[fatrec.Placement]: ) -> list[fatrec.Placement]:
refs = [] placements = []
for subpat in subpatterns: for ref in refs:
if subpat.pattern is None: if ref.target is None:
continue continue
# Note: OASIS mirrors first and rotates second # Note: OASIS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) mirror_across_x, extra_angle = normalize_mirror(ref.mirrored)
frep, rep_offset = repetition_masq2fata(subpat.repetition) frep, rep_offset = repetition_masq2fata(ref.repetition)
offset = numpy.round(subpat.offset + rep_offset).astype(int) offset = rint_cast(ref.offset + rep_offset)
angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360 angle = numpy.rad2deg(ref.rotation + extra_angle) % 360
ref = fatrec.Placement( placement = fatrec.Placement(
name=subpat.pattern.name, name=ref.target,
flip=mirror_across_x, flip=mirror_across_x,
angle=angle, angle=angle,
magnification=subpat.scale, magnification=ref.scale,
properties=annotations_to_properties(subpat.annotations), properties=annotations_to_properties(ref.annotations),
x=offset[0], x=offset[0],
y=offset[1], y=offset[1],
repetition=frep) repetition=frep,
)
refs.append(ref) placements.append(placement)
return refs return placements
def _shapes_to_elements( def _shapes_to_elements(
shapes: List[Shape], shapes: list[Shape],
layer2oas: Callable[[layer_t], Tuple[int, int]], layer2oas: Callable[[layer_t], tuple[int, int]],
) -> List[Union[fatrec.Polygon, fatrec.Path, fatrec.Circle]]: ) -> list[fatrec.Polygon | fatrec.Path | fatrec.Circle]:
# Add a Polygon record for each shape, and Path elements if necessary # Add a Polygon record for each shape, and Path elements if necessary
elements: List[Union[fatrec.Polygon, fatrec.Path, fatrec.Circle]] = [] elements: list[fatrec.Polygon | fatrec.Path | fatrec.Circle] = []
for shape in shapes: for shape in shapes:
layer, datatype = layer2oas(shape.layer) layer, datatype = layer2oas(shape.layer)
repetition, rep_offset = repetition_masq2fata(shape.repetition) repetition, rep_offset = repetition_masq2fata(shape.repetition)
properties = annotations_to_properties(shape.annotations) properties = annotations_to_properties(shape.annotations)
if isinstance(shape, Circle): if isinstance(shape, Circle):
offset = numpy.round(shape.offset + rep_offset).astype(int) offset = rint_cast(shape.offset + rep_offset)
radius = numpy.round(shape.radius).astype(int) radius = rint_cast(shape.radius)
circle = fatrec.Circle(layer=layer, circle = fatrec.Circle(
layer=layer,
datatype=datatype, datatype=datatype,
radius=radius, radius=cast(int, radius),
x=offset[0], x=offset[0],
y=offset[1], y=offset[1],
properties=properties, properties=properties,
repetition=repetition) repetition=repetition,
)
elements.append(circle) elements.append(circle)
elif isinstance(shape, Path): elif isinstance(shape, Path):
xy = numpy.round(shape.offset + shape.vertices[0] + rep_offset).astype(int) xy = rint_cast(shape.offset + shape.vertices[0] + rep_offset)
deltas = numpy.round(numpy.diff(shape.vertices, axis=0)).astype(int) deltas = rint_cast(numpy.diff(shape.vertices, axis=0))
half_width = numpy.round(shape.width / 2).astype(int) half_width = rint_cast(shape.width / 2)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
extension_start = (path_type, shape.cap_extensions[0] if shape.cap_extensions is not None else None) extension_start = (path_type, shape.cap_extensions[0] if shape.cap_extensions is not None else None)
extension_end = (path_type, shape.cap_extensions[1] if shape.cap_extensions is not None else None) extension_end = (path_type, shape.cap_extensions[1] if shape.cap_extensions is not None else None)
path = fatrec.Path(layer=layer, path = fatrec.Path(
layer=layer,
datatype=datatype, datatype=datatype,
point_list=deltas, point_list=cast(Sequence[Sequence[int]], deltas),
half_width=half_width, half_width=cast(int, half_width),
x=xy[0], x=xy[0],
y=xy[1], y=xy[1],
extension_start=extension_start, # TODO implement multiple cap types? extension_start=extension_start, # TODO implement multiple cap types?
@ -580,81 +586,56 @@ def _shapes_to_elements(
elements.append(path) elements.append(path)
else: else:
for polygon in shape.to_polygons(): for polygon in shape.to_polygons():
xy = numpy.round(polygon.offset + polygon.vertices[0] + rep_offset).astype(int) xy = rint_cast(polygon.offset + polygon.vertices[0] + rep_offset)
points = numpy.round(numpy.diff(polygon.vertices, axis=0)).astype(int) points = rint_cast(numpy.diff(polygon.vertices, axis=0))
elements.append(fatrec.Polygon(layer=layer, elements.append(fatrec.Polygon(
layer=layer,
datatype=datatype, datatype=datatype,
x=xy[0], x=xy[0],
y=xy[1], y=xy[1],
point_list=points, point_list=cast(list[list[int]], points),
properties=properties, properties=properties,
repetition=repetition)) repetition=repetition,
))
return elements return elements
def _labels_to_texts( def _labels_to_texts(
labels: List[Label], labels: list[Label],
layer2oas: Callable[[layer_t], Tuple[int, int]], layer2oas: Callable[[layer_t], tuple[int, int]],
) -> List[fatrec.Text]: ) -> list[fatrec.Text]:
texts = [] texts = []
for label in labels: for label in labels:
layer, datatype = layer2oas(label.layer) layer, datatype = layer2oas(label.layer)
repetition, rep_offset = repetition_masq2fata(label.repetition) repetition, rep_offset = repetition_masq2fata(label.repetition)
xy = numpy.round(label.offset + rep_offset).astype(int) xy = rint_cast(label.offset + rep_offset)
properties = annotations_to_properties(label.annotations) properties = annotations_to_properties(label.annotations)
texts.append(fatrec.Text(layer=layer, texts.append(fatrec.Text(
layer=layer,
datatype=datatype, datatype=datatype,
x=xy[0], x=xy[0],
y=xy[1], y=xy[1],
string=label.string, string=label.string,
properties=properties, properties=properties,
repetition=repetition)) repetition=repetition,
))
return texts return texts
def disambiguate_pattern_names(
patterns,
dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
) -> None:
used_names = []
for pat in patterns:
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
i += 1
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)
def repetition_fata2masq( def repetition_fata2masq(
rep: Union[fatamorgana.GridRepetition, fatamorgana.ArbitraryRepetition, None], rep: fatamorgana.GridRepetition | fatamorgana.ArbitraryRepetition | None,
) -> Optional[Repetition]: ) -> Repetition | None:
mrep: Optional[Repetition] mrep: Repetition | None
if isinstance(rep, fatamorgana.GridRepetition): if isinstance(rep, fatamorgana.GridRepetition):
mrep = Grid(a_vector=rep.a_vector, mrep = Grid(a_vector=rep.a_vector,
b_vector=rep.b_vector, b_vector=rep.b_vector,
a_count=rep.a_count, a_count=rep.a_count,
b_count=rep.b_count) b_count=rep.b_count)
elif isinstance(rep, fatamorgana.ArbitraryRepetition): elif isinstance(rep, fatamorgana.ArbitraryRepetition):
displacements = numpy.cumsum(numpy.column_stack((rep.x_displacements, displacements = numpy.cumsum(numpy.column_stack((
rep.y_displacements)), axis=0) rep.x_displacements,
rep.y_displacements,
)), axis=0)
displacements = numpy.vstack(([0, 0], displacements)) displacements = numpy.vstack(([0, 0], displacements))
mrep = Arbitrary(displacements) mrep = Arbitrary(displacements)
elif rep is None: elif rep is None:
@ -663,37 +644,37 @@ def repetition_fata2masq(
def repetition_masq2fata(
- rep: Optional[Repetition],
- ) -> Tuple[Union[fatamorgana.GridRepetition,
- fatamorgana.ArbitraryRepetition,
- None],
- Tuple[int, int]]:
- frep: Union[fatamorgana.GridRepetition, fatamorgana.ArbitraryRepetition, None]
+ rep: Repetition | None,
+ ) -> tuple[
+ fatamorgana.GridRepetition | fatamorgana.ArbitraryRepetition | None,
+ tuple[int, int]
+ ]:
+ frep: fatamorgana.GridRepetition | fatamorgana.ArbitraryRepetition | None
if isinstance(rep, Grid):
a_vector = rint_cast(rep.a_vector)
b_vector = rint_cast(rep.b_vector) if rep.b_vector is not None else None
a_count = rint_cast(rep.a_count)
b_count = rint_cast(rep.b_count) if rep.b_count is not None else None
frep = fatamorgana.GridRepetition(
- a_vector=a_vector,
- b_vector=b_vector,
- a_count=a_count,
- b_count=b_count,
+ a_vector=cast(list[int], a_vector),
+ b_vector=cast(list[int] | None, b_vector),
+ a_count=cast(int, a_count),
+ b_count=cast(int | None, b_count),
)
offset = (0, 0)
elif isinstance(rep, Arbitrary):
diffs = numpy.diff(rep.displacements, axis=0)
diff_ints = rint_cast(diffs)
- frep = fatamorgana.ArbitraryRepetition(diff_ints[:, 0], diff_ints[:, 1])
+ frep = fatamorgana.ArbitraryRepetition(diff_ints[:, 0], diff_ints[:, 1])  # type: ignore
offset = rep.displacements[0, :]
else:
- assert(rep is None)
+ assert rep is None
frep = None
offset = (0, 0)
return frep, offset
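As a rough usage sketch (values are illustrative, assuming the `Grid` and `Arbitrary` repetition classes imported above): a grid repetition maps to a `fatamorgana.GridRepetition` with a zero offset, while an arbitrary repetition is stored as its successive displacements plus an offset taken from the first displacement.
# Hedged usage sketch; numbers are made up.
grid = Grid(a_vector=[200, 0], b_vector=[0, 100], a_count=4, b_count=2)
frep, offset = repetition_masq2fata(grid)       # GridRepetition; offset == (0, 0)

arb = Arbitrary([[0, 0], [10, 0], [30, 5]])
frep2, offset2 = repetition_masq2fata(arb)      # ArbitraryRepetition of diffs [10, 0], [20, 5];
                                                # offset2 is the first displacement, [0, 0] here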
- def annotations_to_properties(annotations: annotations_t) -> List[fatrec.Property]:
+ def annotations_to_properties(annotations: annotations_t) -> list[fatrec.Property]:
#TODO determine is_standard based on key?
properties = []
for key, values in annotations.items():
@ -704,20 +685,20 @@ def annotations_to_properties(annotations: annotations_t) -> List[fatrec.Propert
def properties_to_annotations(
- properties: List[fatrec.Property],
- propnames: Dict[int, NString],
- propstrings: Dict[int, AString],
+ properties: list[fatrec.Property],
+ propnames: dict[int, NString],
+ propstrings: dict[int, AString],
) -> annotations_t:
annotations = {}
for proprec in properties:
- assert(proprec.name is not None)
+ assert proprec.name is not None
if isinstance(proprec.name, int):
key = propnames[proprec.name].string
else:
key = proprec.name.string
- values: List[Union[str, float, int]] = []
- assert(proprec.values is not None)
+ values: list[str | float | int] = []
+ assert proprec.values is not None
for value in proprec.values:
if isinstance(value, (float, int)):
values.append(value)
@ -735,3 +716,25 @@ def properties_to_annotations(
properties = [fatrec.Property(key, vals, is_standard=False)
for key, vals in annotations.items()]
return properties
def check_valid_names(
names: Iterable[str],
) -> None:
"""
Check all provided names to see if they're valid GDSII cell names.
Args:
names: Collection of names to check
"""
allowed_chars = set(string.ascii_letters + string.digits + string.punctuation + ' ')
bad_chars = [
name for name in names
if not set(name).issubset(allowed_chars)
]
if bad_chars:
raise LibraryError('Names contain invalid characters:\n' + pformat(bad_chars))
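A short usage sketch (the cell names here are made up): any name composed of printable ASCII passes silently, while names containing other characters raise a `LibraryError` listing every offender.
# Hedged usage sketch; names are hypothetical.
check_valid_names(['top', 'pad_array', 'ring$1'])    # printable ASCII only: passes
check_valid_names(['bad\x00name'])                   # control character: raises LibraryError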


@ -1,580 +0,0 @@
"""
GDSII file format readers and writers using python-gdsii
Note that GDSII references follow the same convention as `masque`,
with this order of operations:
1. Mirroring
2. Rotation
3. Scaling
4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets)
Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets.
Notes:
* absolute positioning is not supported
* PLEX is not supported
* ELFLAGS are not supported
* GDS does not support library- or structure-level annotations
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
from typing import Sequence
import re
import io
import copy
import base64
import struct
import logging
import pathlib
import gzip
import numpy
from numpy.typing import NDArray, ArrayLike
# python-gdsii
import gdsii.library #type: ignore
import gdsii.structure #type: ignore
import gdsii.elements #type: ignore
from .utils import clean_pattern_vertices, is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
from ..utils import get_bit, set_bit, layer_t, normalize_mirror, annotations_t
logger = logging.getLogger(__name__)
path_cap_map = {
None: Path.Cap.Flush,
0: Path.Cap.Flush,
1: Path.Cap.Circle,
2: Path.Cap.Square,
4: Path.Cap.SquareCustom,
}
def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
return numpy.rint(val, dtype=numpy.int32, casting='unsafe')
def build(
patterns: Union[Pattern, Sequence[Pattern]],
meters_per_unit: float,
logical_units_per_unit: float = 1,
library_name: str = 'masque-gdsii-write',
*,
modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
) -> gdsii.library.Library:
"""
Convert a `Pattern` or list of patterns to a GDSII stream, by first calling
`.polygonize()` to change the shapes into polygons, and then writing patterns
as GDSII structures, polygons as boundary elements, and subpatterns as structure
references (sref).
For each shape,
layer is chosen to be equal to `shape.layer` if it is an int,
or `shape.layer[0]` if it is a tuple
datatype is chosen to be `shape.layer[1]` if available,
otherwise `0`
It is often a good idea to run `pattern.subpatternize()` prior to calling this function,
especially if calling `.polygonize()` will result in very many vertices.
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()`
prior to calling this function.
Args:
patterns: A Pattern or list of patterns to convert.
meters_per_unit: Written into the GDSII file, meters per (database) length unit.
All distances are assumed to be an integer multiple of this unit, and are stored as such.
logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
"logical" unit which is different from the "database" unit, for display purposes.
Default `1`.
library_name: Library name written into the GDSII file.
Default 'masque-gdsii-write'.
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard as well as possible.
WARNING: No additional error checking is performed on the results.
Returns:
`gdsii.library.Library`
"""
if isinstance(patterns, Pattern):
patterns = [patterns]
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names # type: ignore
assert(disambiguate_func is not None) # placate mypy
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
patterns = [p.wrap_repeated_shapes() for p in patterns]
# Create library
lib = gdsii.library.Library(version=600,
name=library_name.encode('ASCII'),
logical_unit=logical_units_per_unit,
physical_unit=meters_per_unit)
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
# Now create a structure for each pattern, and add in any Boundary and SREF elements
for pat in patterns_by_id.values():
structure = gdsii.structure.Structure(name=pat.name.encode('ASCII'))
lib.append(structure)
structure += _shapes_to_elements(pat.shapes)
structure += _labels_to_texts(pat.labels)
structure += _subpatterns_to_refs(pat.subpatterns)
return lib
def write(
patterns: Union[Pattern, Sequence[Pattern]],
stream: io.BufferedIOBase,
*args,
**kwargs,
) -> None:
"""
Write a `Pattern` or list of patterns to a GDSII file.
See `masque.file.gdsii.build()` for details.
Args:
patterns: A Pattern or list of patterns to write to file.
stream: Stream to write to.
*args: passed to `masque.file.gdsii.build()`
**kwargs: passed to `masque.file.gdsii.build()`
"""
lib = build(patterns, *args, **kwargs)
lib.save(stream)
return
def writefile(
patterns: Union[Sequence[Pattern], Pattern],
filename: Union[str, pathlib.Path],
*args,
**kwargs,
) -> None:
"""
Wrapper for `masque.file.gdsii.write()` that takes a filename or path instead of a stream.
Will automatically compress the file if it has a .gz suffix.
Args:
patterns: `Pattern` or list of patterns to save
filename: Filename to save to.
*args: passed to `masque.file.gdsii.write`
**kwargs: passed to `masque.file.gdsii.write`
"""
path = pathlib.Path(filename)
if path.suffix == '.gz':
open_func: Callable = gzip.open
else:
open_func = open
with io.BufferedWriter(open_func(path, mode='wb')) as stream:
write(patterns, stream, *args, **kwargs)
def readfile(
filename: Union[str, pathlib.Path],
*args,
**kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Wrapper for `masque.file.gdsii.read()` that takes a filename or path instead of a stream.
Will automatically decompress gzipped files.
Args:
filename: Filename to save to.
*args: passed to `masque.file.gdsii.read`
**kwargs: passed to `masque.file.gdsii.read`
"""
path = pathlib.Path(filename)
if is_gzipped(path):
open_func: Callable = gzip.open
else:
open_func = open
with io.BufferedReader(open_func(path, mode='rb')) as stream:
results = read(stream, *args, **kwargs)
return results
def read(
stream: io.BufferedIOBase,
clean_vertices: bool = True,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are
translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs
are translated into SubPattern objects.
Additional library info is returned in a dict, containing:
'name': name of the library
'meters_per_unit': number of meters per database unit (all values are in database units)
'logical_units_per_unit': number of "logical" units displayed by layout tools (typically microns)
per database unit
Args:
stream: Stream to read from.
clean_vertices: If `True`, remove any redundant vertices when loading polygons.
The cleaning process removes any polygons with zero area or <3 vertices.
Default `True`.
Returns:
- Dict of pattern_name:Patterns generated from GDSII structures
- Dict of GDSII library info
"""
lib = gdsii.library.Library.load(stream)
library_info = {'name': lib.name.decode('ASCII'),
'meters_per_unit': lib.physical_unit,
'logical_units_per_unit': lib.logical_unit,
}
raw_mode = True # Whether to construct shapes in raw mode (less error checking)
patterns = []
for structure in lib:
pat = Pattern(name=structure.name.decode('ASCII'))
for element in structure:
# Switch based on element type:
if isinstance(element, gdsii.elements.Boundary):
poly = _boundary_to_polygon(element, raw_mode)
pat.shapes.append(poly)
if isinstance(element, gdsii.elements.Path):
path = _gpath_to_mpath(element, raw_mode)
pat.shapes.append(path)
elif isinstance(element, gdsii.elements.Text):
label = Label(offset=element.xy.astype(float),
layer=(element.layer, element.text_type),
string=element.string.decode('ASCII'))
pat.labels.append(label)
elif isinstance(element, (gdsii.elements.SRef, gdsii.elements.ARef)):
pat.subpatterns.append(_ref_to_subpat(element))
if clean_vertices:
clean_pattern_vertices(pat)
patterns.append(pat)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0].decode('ASCII')]
del sp.identifier
return patterns_dict, library_info
def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
""" Helper to turn a layer tuple-or-int into a layer and datatype"""
if isinstance(mlayer, int):
layer = mlayer
data_type = 0
elif isinstance(mlayer, tuple):
layer = mlayer[0]
if len(mlayer) > 1:
data_type = mlayer[1]
else:
data_type = 0
else:
raise PatternError(f'Invalid layer for gdsii: {mlayer}. Note that gdsii layers cannot be strings.')
return layer, data_type
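For illustration, a brief sketch of the mapping above (values made up):
# _mlayer2gds(5)       -> (5, 0)
# _mlayer2gds((7, 2))  -> (7, 2)
# _mlayer2gds('metal') -> raises PatternError (string layers are not valid for GDSII)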
def _ref_to_subpat(
element: Union[gdsii.elements.SRef,
gdsii.elements.ARef]
) -> SubPattern:
"""
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,).
NOTE: "Absolute" means not affected by parent elements.
That's not currently supported by masque at all (and not planned).
"""
rotation = 0.0
offset = numpy.array(element.xy[0], dtype=float)
scale = 1.0
mirror_across_x = False
repetition = None
if element.strans is not None:
if element.mag is not None:
scale = element.mag
# Bit 13 means absolute scale
if get_bit(element.strans, 15 - 13):
raise PatternError('Absolute scale is not implemented in masque!')
if element.angle is not None:
rotation = numpy.deg2rad(element.angle)
# Bit 14 means absolute rotation
if get_bit(element.strans, 15 - 14):
raise PatternError('Absolute rotation is not implemented in masque!')
# Bit 0 means mirror x-axis
if get_bit(element.strans, 15 - 0):
mirror_across_x = True
if isinstance(element, gdsii.elements.ARef):
a_count = element.cols
b_count = element.rows
a_vector = (element.xy[1] - offset) / a_count
b_vector = (element.xy[2] - offset) / b_count
repetition = Grid(a_vector=a_vector, b_vector=b_vector,
a_count=a_count, b_count=b_count)
subpat = SubPattern(pattern=None,
offset=offset,
rotation=rotation,
scale=scale,
mirrored=(mirror_across_x, False),
annotations=_properties_to_annotations(element.properties),
repetition=repetition)
subpat.identifier = (element.struct_name,)
return subpat
def _gpath_to_mpath(element: gdsii.elements.Path, raw_mode: bool) -> Path:
if element.path_type in path_cap_map:
cap = path_cap_map[element.path_type]
else:
raise PatternError(f'Unrecognized path type: {element.path_type}')
args = {'vertices': element.xy.astype(float),
'layer': (element.layer, element.data_type),
'width': element.width if element.width is not None else 0.0,
'cap': cap,
'offset': numpy.zeros(2),
'annotations': _properties_to_annotations(element.properties),
'raw': raw_mode,
}
if cap == Path.Cap.SquareCustom:
args['cap_extensions'] = numpy.zeros(2)
if element.bgn_extn is not None:
args['cap_extensions'][0] = element.bgn_extn
if element.end_extn is not None:
args['cap_extensions'][1] = element.end_extn
return Path(**args)
def _boundary_to_polygon(element: gdsii.elements.Boundary, raw_mode: bool) -> Polygon:
args = {'vertices': element.xy[:-1].astype(float),
'layer': (element.layer, element.data_type),
'offset': numpy.zeros(2),
'annotations': _properties_to_annotations(element.properties),
'raw': raw_mode,
}
return Polygon(**args)
def _subpatterns_to_refs(
subpatterns: List[SubPattern],
) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]:
refs = []
for subpat in subpatterns:
if subpat.pattern is None:
continue
encoded_name = subpat.pattern.name.encode('ASCII')
# Note: GDS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
rep = subpat.repetition
new_refs: List[Union[gdsii.elements.SRef, gdsii.elements.ARef]]
ref: Union[gdsii.elements.SRef, gdsii.elements.ARef]
if isinstance(rep, Grid):
b_vector = rep.b_vector if rep.b_vector is not None else numpy.zeros(2)
b_count = rep.b_count if rep.b_count is not None else 1
xy: NDArray[numpy.float64] = numpy.array(subpat.offset) + [
[0, 0],
rep.a_vector * rep.a_count,
b_vector * b_count,
]
ref = gdsii.elements.ARef(
struct_name=encoded_name,
xy=rint_cast(xy),
cols=rint_cast(rep.a_count),
rows=rint_cast(rep.b_count),
)
new_refs = [ref]
elif rep is None:
ref = gdsii.elements.SRef(
struct_name=encoded_name,
xy=rint_cast([subpat.offset]),
)
new_refs = [ref]
else:
new_refs = [gdsii.elements.SRef(
struct_name=encoded_name,
xy=rint_cast([subpat.offset + dd]),
)
for dd in rep.displacements]
for ref in new_refs:
ref.angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360
# strans must be non-None for angle and mag to take effect
ref.strans = set_bit(0, 15 - 0, mirror_across_x)
ref.mag = subpat.scale
ref.properties = _annotations_to_properties(subpat.annotations, 512)
refs += new_refs
return refs
def _properties_to_annotations(properties: List[Tuple[int, bytes]]) -> annotations_t:
return {str(k): [v.decode()] for k, v in properties}
def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> List[Tuple[int, bytes]]:
cum_len = 0
props = []
for key, vals in annotations.items():
try:
i = int(key)
except ValueError:
raise PatternError(f'Annotation key {key} is not convertible to an integer')
if not (0 < i < 126):
raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')
val_strings = ' '.join(str(val) for val in vals)
b = val_strings.encode()
if len(b) > 126:
raise PatternError(f'Annotation value {b!r} is longer than 126 characters!')
cum_len += numpy.ceil(len(b) / 2) * 2 + 2
if cum_len > max_len:
raise PatternError(f'Sum of annotation data will be longer than {max_len} bytes! Generated bytes were {b!r}')
props.append((i, b))
return props
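For illustration, a sketch of the mapping above (values made up): an annotation dict keyed by numeric strings becomes a list of `(int, bytes)` property tuples, with multiple values joined by spaces.
# _annotations_to_properties({'2': ['foo', 5]})  -> [(2, b'foo 5')]
# _annotations_to_properties({'attr': ['x']})    -> raises PatternError (key not an integer)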
def _shapes_to_elements(
shapes: List[Shape],
polygonize_paths: bool = False,
) -> List[Union[gdsii.elements.Boundary, gdsii.elements.Path]]:
elements: List[Union[gdsii.elements.Boundary, gdsii.elements.Path]] = []
# Add a Boundary element for each shape, and Path elements if necessary
for shape in shapes:
layer, data_type = _mlayer2gds(shape.layer)
properties = _annotations_to_properties(shape.annotations, 128)
if isinstance(shape, Path) and not polygonize_paths:
xy = rint_cast(shape.vertices + shape.offset)
width = rint_cast(shape.width)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
path = gdsii.elements.Path(layer=layer,
data_type=data_type,
xy=xy)
path.path_type = path_type
path.width = width
path.properties = properties
elements.append(path)
else:
for polygon in shape.to_polygons():
xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32)
numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe')
xy_closed[-1] = xy_closed[0]
boundary = gdsii.elements.Boundary(
layer=layer,
data_type=data_type,
xy=xy_closed,
)
boundary.properties = properties
elements.append(boundary)
return elements
def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
texts = []
for label in labels:
properties = _annotations_to_properties(label.annotations, 128)
layer, text_type = _mlayer2gds(label.layer)
xy = rint_cast([label.offset])
text = gdsii.elements.Text(
layer=layer,
text_type=text_type,
xy=xy,
string=label.string.encode('ASCII'),
)
text.properties = properties
texts.append(text)
return texts
def disambiguate_pattern_names(
patterns: Sequence[Pattern],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Optional[Callable[[str], bool]] = None,
) -> None:
"""
Args:
patterns: List of patterns to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
used_names = []
for pat in set(patterns):
# Shorten names which already exceed max-length
if len(pat.name) > max_name_length:
shortened_name = pat.name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
+ f' shortening to "{shortened_name}" before generating suffix')
else:
shortened_name = pat.name
# Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
# Add a suffix that makes the name unique
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
i += 1
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')
if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
if len(encoded_name) > max_name_length:
raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)


@ -1,7 +1,7 @@
""" """
SVG file format readers and writers SVG file format readers and writers
""" """
from typing import Dict, Optional from typing import Mapping
import warnings import warnings
import numpy import numpy
@ -13,22 +13,23 @@ from .. import Pattern
def writefile(
- pattern: Pattern,
+ library: Mapping[str, Pattern],
+ top: str,
filename: str,
custom_attributes: bool = False,
) -> None:
"""
Write a Pattern to an SVG file, by first calling .polygonize() on it
to change the shapes into polygons, and then writing patterns as SVG
- groups (<g>, inside <defs>), polygons as paths (<path>), and subpatterns
+ groups (<g>, inside <defs>), polygons as paths (<path>), and refs
as <use> elements.
Note that this function modifies the Pattern.
- If `custom_attributes` is `True`, non-standard `pattern_layer` and `pattern_dose` attributes
- are written to the relevant elements.
- It is often a good idea to run `pattern.subpatternize()` on pattern prior to
+ If `custom_attributes` is `True`, a non-standard `pattern_layer` attribute
+ is written to the relevant elements.
+ It is often a good idea to run `pattern.dedup()` on pattern prior to
calling this function, especially if calling .polygonize() will result in very
many vertices.
@ -38,14 +39,15 @@ def writefile(
Args:
pattern: Pattern to write to file. Modified by this function.
filename: Filename to write to.
- custom_attributes: Whether to write non-standard `pattern_layer` and
- `pattern_dose` attributes to the SVG elements.
+ custom_attributes: Whether to write non-standard `pattern_layer` attribute to the
+ SVG elements.
"""
+ pattern = library[top]
# Polygonize pattern
pattern.polygonize()
- bounds = pattern.get_bounds()
+ bounds = pattern.get_bounds(library=library)
if bounds is None:
bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]])
warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox')
@ -59,15 +61,9 @@ def writefile(
svg = svgwrite.Drawing(filename, profile='full', viewBox=viewbox_string,
debug=(not custom_attributes))
- # Get a dict of id(pattern) -> pattern
- patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern}      # type: Dict[int, Optional[Pattern]]
- # Now create a group for each row in sd_table (ie, each pattern + dose combination)
- # and add in any Boundary and Use elements
- for pat in patterns_by_id.values():
- if pat is None:
- continue
- svg_group = svg.g(id=mangle_name(pat), fill='blue', stroke='red')
+ # Now create a group for each pattern and add in any Boundary and Use elements
+ for name, pat in library.items():
+ svg_group = svg.g(id=mangle_name(name), fill='blue', stroke='red')
for shape in pat.shapes:
for polygon in shape.to_polygons():
@ -76,25 +72,26 @@ def writefile(
path = svg.path(d=path_spec)
if custom_attributes:
path['pattern_layer'] = polygon.layer
- path['pattern_dose'] = polygon.dose
svg_group.add(path)
- for subpat in pat.subpatterns:
- if subpat.pattern is None:
+ for ref in pat.refs:
+ if ref.target is None:
continue
- transform = f'scale({subpat.scale:g}) rotate({subpat.rotation:g}) translate({subpat.offset[0]:g},{subpat.offset[1]:g})'
- use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform)
- if custom_attributes:
- use['pattern_dose'] = subpat.dose
+ transform = f'scale({ref.scale:g}) rotate({ref.rotation:g}) translate({ref.offset[0]:g},{ref.offset[1]:g})'
+ use = svg.use(href='#' + mangle_name(ref.target), transform=transform)
svg_group.add(use)
svg.defs.add(svg_group)
- svg.add(svg.use(href='#' + mangle_name(pattern)))
+ svg.add(svg.use(href='#' + mangle_name(top)))
svg.save()
- def writefile_inverted(pattern: Pattern, filename: str):
+ def writefile_inverted(
+ library: Mapping[str, Pattern],
+ top: str,
+ filename: str,
+ ) -> None:
"""
Write an inverted Pattern to an SVG file, by first calling `.polygonize()` and
`.flatten()` on it to change the shapes into polygons, then drawing a bounding
@ -110,10 +107,12 @@ def writefile_inverted(pattern: Pattern, filename: str):
pattern: Pattern to write to file. Modified by this function.
filename: Filename to write to.
"""
+ pattern = library[top]
# Polygonize and flatten pattern
- pattern.polygonize().flatten()
- bounds = pattern.get_bounds()
+ pattern.polygonize().flatten(library)
+ bounds = pattern.get_bounds(library=library)
if bounds is None:
bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]])
warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox')


@ -1,29 +1,33 @@
""" """
Helper functions for file reading and writing Helper functions for file reading and writing
""" """
from typing import Set, Tuple, List from typing import IO, Iterator
import re import re
import copy
import pathlib import pathlib
import logging
import tempfile
import shutil
from contextlib import contextmanager
from .. import Pattern, PatternError from .. import Pattern, PatternError
from ..shapes import Polygon, Path from ..shapes import Polygon, Path
def mangle_name(pattern: Pattern, dose_multiplier: float = 1.0) -> str: logger = logging.getLogger(__name__)
def mangle_name(name: str) -> str:
""" """
Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier. Sanitize a name.
Args: Args:
pattern: Pattern whose name we want to mangle. name: Name we want to mangle.
dose_multiplier: Dose multiplier to mangle with.
Returns: Returns:
Mangled name. Mangled name.
""" """
expression = re.compile(r'[^A-Za-z0-9_\?\$]') expression = re.compile(r'[^A-Za-z0-9_\?\$]')
full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern)) sanitized_name = expression.sub('_', name)
sanitized_name = expression.sub('_', full_name)
return sanitized_name return sanitized_name
@ -51,136 +55,25 @@ def clean_pattern_vertices(pat: Pattern) -> Pattern:
return pat
def make_dose_table(patterns: List[Pattern], dose_multiplier: float = 1.0) -> Set[Tuple[int, float]]:
"""
Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns)
Args:
pattern: Source Patterns.
dose_multiplier: Multiplier for all written_dose entries.
Returns:
`{(id(subpat.pattern), written_dose), ...}`
"""
dose_table = {(id(pattern), dose_multiplier) for pattern in patterns}
for pattern in patterns:
for subpat in pattern.subpatterns:
if subpat.pattern is None:
continue
subpat_dose_entry = (id(subpat.pattern), subpat.dose * dose_multiplier)
if subpat_dose_entry not in dose_table:
subpat_dose_table = make_dose_table([subpat.pattern], subpat.dose * dose_multiplier)
dose_table = dose_table.union(subpat_dose_table)
return dose_table
def dtype2dose(pattern: Pattern) -> Pattern:
"""
For each shape in the pattern, if the layer is a tuple, set the
layer to the tuple's first element and set the dose to the
tuple's second element.
Generally intended for use with `Pattern.apply()`.
Args:
pattern: Pattern to modify
Returns:
pattern
"""
for shape in pattern.shapes:
if isinstance(shape.layer, tuple):
shape.dose = shape.layer[1]
shape.layer = shape.layer[0]
return pattern
def dose2dtype(
patterns: List[Pattern],
) -> Tuple[List[Pattern], List[float]]:
"""
For each shape in each pattern, set shape.layer to the tuple
(base_layer, datatype), where:
layer is chosen to be equal to the original shape.layer if it is an int,
or shape.layer[0] if it is a tuple. `str` layers raise a PatternError.
datatype is chosen arbitrarily, based on calculated dose for each shape.
Shapes with equal calculated dose will have the same datatype.
A list of doses is returned, providing a mapping between datatype
(list index) and dose (list entry).
Note that this function modifies the input Pattern(s).
Args:
patterns: A `Pattern` or list of patterns to write to file. Modified by this function.
Returns:
(patterns, dose_list)
patterns: modified input patterns
dose_list: A list of doses, providing a mapping between datatype (int, list index)
and dose (float, list entry).
"""
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
# Get a table of (id(pat), written_dose) for each pattern and subpattern
sd_table = make_dose_table(patterns)
# Figure out all the unique doses necessary to write this pattern
# This means going through each row in sd_table and adding the dose values needed to write
# that subpattern at that dose level
dose_vals = set()
for pat_id, pat_dose in sd_table:
pat = patterns_by_id[pat_id]
for shape in pat.shapes:
dose_vals.add(shape.dose * pat_dose)
if len(dose_vals) > 256:
raise PatternError('Too many dose values: {}, maximum 256 when using dtypes.'.format(len(dose_vals)))
dose_vals_list = list(dose_vals)
# Create a new pattern for each non-1-dose entry in the dose table
# and update the shapes to reflect their new dose
new_pats = {} # (id, dose) -> new_pattern mapping
for pat_id, pat_dose in sd_table:
if pat_dose == 1:
new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
continue
old_pat = patterns_by_id[pat_id]
pat = old_pat.copy() # keep old subpatterns
pat.shapes = copy.deepcopy(old_pat.shapes)
pat.labels = copy.deepcopy(old_pat.labels)
encoded_name = mangle_name(pat, pat_dose)
if len(encoded_name) == 0:
raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name))
pat.name = encoded_name
for shape in pat.shapes:
data_type = dose_vals_list.index(shape.dose * pat_dose)
if isinstance(shape.layer, int):
shape.layer = (shape.layer, data_type)
elif isinstance(shape.layer, tuple):
shape.layer = (shape.layer[0], data_type)
else:
raise PatternError(f'Invalid layer for gdsii: {shape.layer}')
new_pats[(pat_id, pat_dose)] = pat
# Go back through all the dose-specific patterns and fix up their subpattern entries
for (pat_id, pat_dose), pat in new_pats.items():
for subpat in pat.subpatterns:
dose_mult = subpat.dose * pat_dose
subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)]
return patterns, dose_vals_list
def is_gzipped(path: pathlib.Path) -> bool:
with open(path, 'rb') as stream:
magic_bytes = stream.read(2)
return magic_bytes == b'\x1f\x8b'
@contextmanager
def tmpfile(path: str | pathlib.Path) -> Iterator[IO[bytes]]:
"""
Context manager which allows you to write to a temporary file,
and move that file into its final location only after the write
has finished.
"""
path = pathlib.Path(path)
suffixes = ''.join(path.suffixes)
with tempfile.NamedTemporaryFile(suffix=suffixes, delete=False) as tmp_stream:
yield tmp_stream
try:
shutil.move(tmp_stream.name, path)
finally:
pathlib.Path(tmp_stream.name).unlink(missing_ok=True)
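A brief usage sketch (the target filename is arbitrary): data is written to a temporary file first and only moved into place after the block finishes.
# Hedged usage sketch; 'layout.oas.gz' is just an example target path.
with tmpfile('layout.oas.gz') as stream:
    stream.write(b'...file contents...')
# once the block exits, the temporary file has been moved to layout.oas.gz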


@ -1,4 +1,4 @@
- from typing import Tuple, Dict, Optional, TypeVar
+ from typing import Self
import copy
import numpy
@ -6,26 +6,20 @@ from numpy.typing import ArrayLike, NDArray
from .repetition import Repetition
from .utils import rotation_matrix_2d, layer_t, AutoSlots, annotations_t
- from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, LockableImpl, RepeatableImpl
+ from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, RepeatableImpl
from .traits import AnnotatableImpl
- L = TypeVar('L', bound='Label')
- class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, AnnotatableImpl,
+ class Label(PositionableImpl, LayerableImpl, RepeatableImpl, AnnotatableImpl,
Pivotable, Copyable, metaclass=AutoSlots):
"""
A text annotation with a position and layer (but no size; it is not drawn)
"""
- __slots__ = ( '_string', 'identifier')
+ __slots__ = ( '_string', )
_string: str
""" Label string """
- identifier: Tuple
- """ Arbitrary identifier tuple, useful for keeping track of history when flattening """
'''
---- Properties
'''
@ -47,37 +41,30 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot
*,
offset: ArrayLike = (0.0, 0.0),
layer: layer_t = 0,
- repetition: Optional[Repetition] = None,
- annotations: Optional[annotations_t] = None,
- locked: bool = False,
- identifier: Tuple = (),
+ repetition: Repetition | None = None,
+ annotations: annotations_t | None = None,
) -> None:
- LockableImpl.unlock(self)
- self.identifier = identifier
self.string = string
self.offset = numpy.array(offset, dtype=float, copy=True)
self.layer = layer
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
- self.set_locked(locked)
- def __copy__(self: L) -> L:
- return type(self)(string=self.string,
+ def __copy__(self) -> Self:
+ return type(self)(
+ string=self.string,
offset=self.offset.copy(),
layer=self.layer,
repetition=self.repetition,
- locked=self.locked,
- identifier=self.identifier)
+ )
- def __deepcopy__(self: L, memo: Dict = None) -> L:
+ def __deepcopy__(self, memo: dict | None = None) -> Self:
memo = {} if memo is None else memo
new = copy.copy(self)
- LockableImpl.unlock(new)
new._offset = self._offset.copy()
- new.set_locked(self.locked)
return new
- def rotate_around(self: L, pivot: ArrayLike, rotation: float) -> L:
+ def rotate_around(self, pivot: ArrayLike, rotation: float) -> Self:
"""
Rotate the label around a point.
@ -106,17 +93,3 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot
Bounds [[xmin, xmax], [ymin, ymax]]
"""
return numpy.array([self.offset, self.offset])
def lock(self: L) -> L:
PositionableImpl._lock(self)
LockableImpl.lock(self)
return self
def unlock(self: L) -> L:
LockableImpl.unlock(self)
PositionableImpl._unlock(self)
return self
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
return f'<Label "{self.string}" l{self.layer} o{self.offset}{locked}>'

masque/library.py (new file, 1043 lines; diff suppressed because it is too large)

@ -1,2 +0,0 @@
from .library import Library, PatternGenerator
from .device_library import DeviceLibrary, LibDeviceLibrary


@ -1,298 +0,0 @@
"""
DeviceLibrary class for managing unique name->device mappings and
deferred loading or creation.
"""
from typing import Dict, Callable, TypeVar, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator
import logging
from pprint import pformat
from ..error import DeviceLibraryError
from ..library import Library
from ..builder import Device
from .. import Pattern
logger = logging.getLogger(__name__)
D = TypeVar('D', bound='DeviceLibrary')
L = TypeVar('L', bound='LibDeviceLibrary')
class DeviceLibrary:
"""
This class maps names to functions which generate or load the
relevant `Device` object.
This class largely functions the same way as `Library`, but
operates on `Device`s rather than `Patterns` and thus has no
need for distinctions between primary/secondary devices (as
there is no inter-`Device` hierarchy).
Each device is cached the first time it is used. The cache can
be disabled by setting the `enable_cache` attribute to `False`.
"""
generators: Dict[str, Callable[[], Device]]
cache: Dict[Union[str, Tuple[str, str]], Device]
enable_cache: bool = True
def __init__(self) -> None:
self.generators = {}
self.cache = {}
def __setitem__(self, key: str, value: Callable[[], Device]) -> None:
self.generators[key] = value
if key in self.cache:
del self.cache[key]
def __delitem__(self, key: str) -> None:
del self.generators[key]
if key in self.cache:
del self.cache[key]
def __getitem__(self, key: str) -> Device:
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
return self.cache[key]
logger.debug(f'loading {key}')
dev = self.generators[key]()
self.cache[key] = dev
return dev
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __contains__(self, key: str) -> bool:
return key in self.generators
def keys(self) -> Iterator[str]:
return iter(self.generators.keys())
def values(self) -> Iterator[Device]:
return iter(self[key] for key in self.keys())
def items(self) -> Iterator[Tuple[str, Device]]:
return iter((key, self[key]) for key in self.keys())
def __repr__(self) -> str:
return '<DeviceLibrary with keys ' + repr(list(self.generators.keys())) + '>'
def set_const(self, const: Device) -> None:
"""
Convenience function to avoid having to manually wrap
already-generated Device objects into callables.
Args:
const: Pre-generated device object
"""
self.generators[const.pattern.name] = lambda: const
def add(
self: D,
other: D,
use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[str], bool] = lambda name: False,
) -> D:
"""
Add keys from another library into this one.
There must be no conflicting keys.
Args:
other: The library to insert keys from
use_ours: Decision function for name conflicts. Will be called with duplicate cell names.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.
Returns:
self
"""
duplicates = set(self.keys()) & set(other.keys())
keep_ours = set(name for name in duplicates if use_ours(name))
keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
conflicts = duplicates - keep_ours - keep_theirs
if conflicts:
raise DeviceLibraryError('Duplicate keys encountered in DeviceLibrary merge: '
+ pformat(conflicts))
for name in set(other.generators.keys()) - keep_ours:
self.generators[name] = other.generators[name]
if name in other.cache:
self.cache[name] = other.cache[name]
return self
def clear_cache(self: D) -> D:
"""
Clear the cache of this library.
This is usually used before modifying or deleting cells, e.g. when merging
with another library.
Returns:
self
"""
self.cache = {}
return self
def add_device(
self,
name: str,
fn: Callable[[], Device],
dev2pat: Callable[[Device], Pattern],
prefix: str = '',
) -> None:
"""
Convenience function for adding a device to the library.
- The device is generated with the provided `fn()`
- Port info is written to the pattern using the provided dev2pat
- The pattern is renamed to match the provided `prefix + name`
- If `prefix` is non-empty, a wrapped copy is also added, named
`name` (no prefix). See `wrap_device()` for details.
Adding devices with this function helps to
- Make sure Pattern names are reflective of what the devices are named
- Ensure port info is written into the `Pattern`, so that the `Device`
can be reconstituted from the layout.
- Simplify adding a prefix to all device names, to make it easier to
track their provenance and purpose, while also allowing for
generic device names which can later be swapped out with different
underlying implementations.
Args:
name: Base name for the device. If a prefix is used, this is the
"generic" name (e.g. "L3_cavity" vs "2022_02_02_L3_cavity").
fn: Function which is called to generate the device.
dev2pat: Post-processing function which is called to add the port
info into the device's pattern.
prefix: If present, the actual device is named `prefix + name`, and
a second device with name `name` is also added (containing only
this one).
"""
def build_dev() -> Device:
dev = fn()
dev.pattern = dev2pat(dev)
dev.pattern.rename(prefix + name)
return dev
self[prefix + name] = build_dev
if prefix:
self.wrap_device(name, prefix + name)
def wrap_device(
self,
name: str,
old_name: str,
) -> None:
"""
Create a new device which simply contains an instance of an already-existing device.
This is useful for assigning an alternate name to a device, while still keeping
the original name available for traceability.
Args:
name: Name for the wrapped device.
old_name: Name of the existing device to wrap.
"""
def build_wrapped_dev() -> Device:
old_dev = self[old_name]
wrapper = Pattern(name=name)
wrapper.addsp(old_dev.pattern)
return Device(wrapper, old_dev.ports)
self[name] = build_wrapped_dev
class LibDeviceLibrary(DeviceLibrary):
"""
Extends `DeviceLibrary`, enabling it to ingest `Library` objects
(e.g. obtained by loading a GDS file).
Each `Library` object must be accompanied by a `pat2dev` function,
which takes in the `Pattern` and returns a full `Device` (including
port info). This is usually accomplished by scanning the `Pattern` for
port-related geometry, but could also bake in external info.
`Library` objects are ingested into `underlying`, which is a
`Library` which is kept in sync with the `DeviceLibrary` when
devices are removed (or new libraries added via `add_library()`).
"""
underlying: Library
def __init__(self) -> None:
DeviceLibrary.__init__(self)
self.underlying = Library()
def __setitem__(self, key: str, value: Callable[[], Device]) -> None:
self.generators[key] = value
if key in self.cache:
del self.cache[key]
# If any `Library` that has been (or will be) added has an entry for `key`,
# it will be added to `self.underlying` and then returned by it during subpattern
# resolution for other entries, and will conflict with the name for our
# wrapped device. To avoid that, we need to set ourselves as the "true" source of
# the `Pattern` named `key`.
if key in self.underlying:
raise DeviceLibraryError(f'Device name {key} already exists in underlying Library!'
' Demote or delete it first.')
# NOTE that this means the `Device` may be cached without the `Pattern` being in
# the `underlying` cache yet!
self.underlying.set_value(key, '__DeviceLibrary', lambda: self[key].pattern)
def __delitem__(self, key: str) -> None:
DeviceLibrary.__delitem__(self, key)
if key in self.underlying:
del self.underlying[key]
def add_library(
self: L,
lib: Library,
pat2dev: Callable[[Pattern], Device],
use_ours: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
use_theirs: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
) -> L:
"""
Add a pattern `Library` into this `LibDeviceLibrary`.
This requires a `pat2dev` function which can transform each `Pattern`
into a `Device`. For example, this can be accomplished by scanning
the `Pattern` data for port location info or by looking up port info
based on the pattern name or other characteristics in a hardcoded or
user-supplied dictionary.
Args:
lib: Pattern library to add.
pat2dev: Function for transforming each `Pattern` object from `lib`
into a `Device` which will be returned by this device library.
use_ours: Decision function for name conflicts. Will be called with
duplicate cell names, and (name, tag) tuples from the underlying library.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.
Returns:
self
"""
duplicates = set(lib.keys()) & set(self.keys())
keep_ours = set(name for name in duplicates if use_ours(name))
keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
bad_duplicates = duplicates - keep_ours - keep_theirs
if bad_duplicates:
raise DeviceLibraryError('Duplicate devices (no action specified): ' + pformat(bad_duplicates))
# No 'bad' duplicates, so all duplicates should be overwritten
for name in keep_theirs:
self.underlying.demote(name)
self.underlying.add(lib, use_ours, use_theirs)
for name in lib:
self.generators[name] = lambda name=name: pat2dev(self.underlying[name])
return self


@ -1,355 +0,0 @@
"""
Library class for managing unique name->pattern mappings and
deferred loading or creation.
"""
from typing import Dict, Callable, TypeVar, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator
import logging
from pprint import pformat
from dataclasses import dataclass
import copy
from ..error import LibraryError
if TYPE_CHECKING:
from ..pattern import Pattern
logger = logging.getLogger(__name__)
@dataclass
class PatternGenerator:
__slots__ = ('tag', 'gen')
tag: str
""" Unique identifier for the source """
gen: Callable[[], 'Pattern']
""" Function which generates a pattern when called """
L = TypeVar('L', bound='Library')
class Library:
"""
This class is usually used to create a library of Patterns by mapping names to
functions which generate or load the relevant `Pattern` object as-needed.
Generated/loaded patterns can have "symbolic" references, where a SubPattern
object `sp` has a `None`-valued `sp.pattern` attribute, in which case the
Library expects `sp.identifier[0]` to contain a string which specifies the
referenced pattern's name.
Patterns can either be "primary" (default) or "secondary". Both get the
same deferred-load behavior, but "secondary" patterns may have conflicting
names and are not accessible through basic []-indexing. They are only used
to fill symbolic references in cases where there is no "primary" pattern
available, and only if both the referencing and referenced pattern-generators'
`tag` values match (i.e., only if they came from the same source).
Primary patterns can be turned into secondary patterns with the `demote`
method, `promote` performs the reverse (secondary -> primary) operation.
The `set_const` and `set_value` methods provide an easy way to transparently
construct PatternGenerator objects and directly create "secondary"
patterns.
The cache can be disabled by setting the `enable_cache` attribute to `False`.
"""
primary: Dict[str, PatternGenerator]
secondary: Dict[Tuple[str, str], PatternGenerator]
cache: Dict[Union[str, Tuple[str, str]], 'Pattern']
enable_cache: bool = True
def __init__(self) -> None:
self.primary = {}
self.secondary = {}
self.cache = {}
def __setitem__(self, key: str, value: PatternGenerator) -> None:
self.primary[key] = value
if key in self.cache:
logger.warning(f'Replaced library item "{key}" & existing cache entry.'
' Previously-generated Pattern will *not* be updated!')
del self.cache[key]
def __delitem__(self, key: str) -> None:
if isinstance(key, str):
del self.primary[key]
elif isinstance(key, tuple):
del self.secondary[key]
if key in self.cache:
logger.warning(f'Deleting library item "{key}" & existing cache entry.'
' Previously-generated Pattern may remain in the wild!')
del self.cache[key]
def __getitem__(self, key: str) -> 'Pattern':
return self.get_primary(key)
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __contains__(self, key: str) -> bool:
return key in self.primary
def get_primary(self, key: str) -> 'Pattern':
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
return self.cache[key]
logger.debug(f'loading {key}')
pg = self.primary[key]
pat = pg.gen()
self.resolve_subpatterns(pat, pg.tag)
self.cache[key] = pat
return pat
def get_secondary(self, key: str, tag: str) -> 'Pattern':
logger.debug(f'get_secondary({key}, {tag})')
key2 = (key, tag)
if self.enable_cache and key2 in self.cache:
return self.cache[key2]
pg = self.secondary[key2]
pat = pg.gen()
self.resolve_subpatterns(pat, pg.tag)
self.cache[key2] = pat
return pat
def set_secondary(self, key: str, tag: str, value: PatternGenerator) -> None:
self.secondary[(key, tag)] = value
if (key, tag) in self.cache:
logger.warning(f'Replaced library item "{key}" & existing cache entry.'
' Previously-generated Pattern will *not* be updated!')
del self.cache[(key, tag)]
def resolve_subpatterns(self, pat: 'Pattern', tag: str) -> 'Pattern':
logger.debug(f'Resolving subpatterns in {pat.name}')
for sp in pat.subpatterns:
if sp.pattern is not None:
continue
key = sp.identifier[0]
if key in self.primary:
sp.pattern = self.get_primary(key)
continue
if (key, tag) in self.secondary:
sp.pattern = self.get_secondary(key, tag)
continue
raise LibraryError(f'Broken reference to {key} (tag {tag})')
return pat
def keys(self) -> Iterator[str]:
return iter(self.primary.keys())
def values(self) -> Iterator['Pattern']:
return iter(self[key] for key in self.keys())
def items(self) -> Iterator[Tuple[str, 'Pattern']]:
return iter((key, self[key]) for key in self.keys())
def __repr__(self) -> str:
return '<Library with keys ' + repr(list(self.primary.keys())) + '>'
def set_const(
self,
key: str,
tag: Any,
const: 'Pattern',
secondary: bool = False,
) -> None:
"""
Convenience function to avoid having to manually wrap
constant values into callables.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
const: Pattern object to return
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=lambda: const)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def set_value(
self,
key: str,
tag: str,
value: Callable[[], 'Pattern'],
secondary: bool = False,
) -> None:
"""
Convenience function to automatically build a PatternGenerator.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
value: Callable which takes no arguments and generates the `Pattern` object
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=value)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def precache(self: L) -> L:
"""
Force all patterns into the cache
Returns:
self
"""
for key in self.primary:
_ = self.get_primary(key)
for key2 in self.secondary:
_ = self.get_secondary(*key2)
return self
def add(
self: L,
other: L,
use_ours: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
use_theirs: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
) -> L:
"""
Add keys from another library into this one.
Args:
other: The library to insert keys from
use_ours: Decision function for name conflicts.
May be called with cell names and (name, tag) tuples for primary or
secondary cells, respectively.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.
Returns:
self
"""
duplicates1 = set(self.primary.keys()) & set(other.primary.keys())
duplicates2 = set(self.secondary.keys()) & set(other.secondary.keys())
keep_ours1 = set(name for name in duplicates1 if use_ours(name))
keep_ours2 = set(name for name in duplicates2 if use_ours(name))
keep_theirs1 = set(name for name in duplicates1 - keep_ours1 if use_theirs(name))
keep_theirs2 = set(name for name in duplicates2 - keep_ours2 if use_theirs(name))
conflicts1 = duplicates1 - keep_ours1 - keep_theirs1
conflicts2 = duplicates2 - keep_ours2 - keep_theirs2
if conflicts1:
raise LibraryError('Unresolved duplicate keys encountered in library merge: ' + pformat(conflicts1))
if conflicts2:
raise LibraryError('Unresolved duplicate secondary keys encountered in library merge: ' + pformat(conflicts2))
for key1 in set(other.primary.keys()) - keep_ours1:
self[key1] = other.primary[key1]
if key1 in other.cache:
self.cache[key1] = other.cache[key1]
for key2 in set(other.secondary.keys()) - keep_ours2:
self.set_secondary(*key2, other.secondary[key2])
if key2 in other.cache:
self.cache[key2] = other.cache[key2]
return self
def demote(self, key: str) -> None:
"""
Turn a primary pattern into a secondary one.
It will no longer be accessible through [] indexing and will only be used to
when referenced by other patterns from the same source, and only if no primary
pattern with the same name exists.
Args:
key: Lookup key, usually the cell/pattern name
"""
pg = self.primary[key]
key2 = (key, pg.tag)
self.secondary[key2] = pg
if key in self.cache:
self.cache[key2] = self.cache[key]
del self[key]
def promote(self, key: str, tag: str) -> None:
"""
Turn a secondary pattern into a primary one.
It will become accessible through [] indexing and will be used to satisfy any
reference to a pattern with its key, regardless of tag.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for identifying the pattern's source, used to disambiguate
secondary patterns
"""
if key in self.primary:
raise LibraryError(f'Promoting ({key}, {tag}), but {key} already exists in primary!')
key2 = (key, tag)
pg = self.secondary[key2]
self.primary[key] = pg
if key2 in self.cache:
self.cache[key] = self.cache[key2]
del self.secondary[key2]
del self.cache[key2]
def copy(self, preserve_cache: bool = False) -> 'Library':
"""
Create a copy of this `Library`.
A shallow copy is made of the contained dicts.
Note that you should probably clear the cache (with `clear_cache()`) after copying.
Returns:
A copy of self
"""
new = Library()
new.primary.update(self.primary)
new.secondary.update(self.secondary)
new.cache.update(self.cache)
return new
def clear_cache(self: L) -> L:
"""
Clear the cache of this library.
This is usually used before modifying or deleting cells, e.g. when merging
with another library.
Returns:
self
"""
self.cache = {}
return self
r"""
# Add a filter for names which aren't added
- Registration:
- scanned files (tag=filename, gen_fn[stream, {name: pos}])
- generator functions (tag='fn?', gen_fn[params])
- merge decision function (based on tag and cell name, can be "neither") ??? neither=keep both, load using same tag!
- Load process:
- file:
- read single cell
- check subpat identifiers, and load stuff recursively based on those. If not present, load from same file??
- function:
- generate cell
- traverse and check if we should load any subcells from elsewhere. replace if so.
* should fn generate subcells at all, or register those separately and have us control flow? maybe ask us and generate itself if not present?
- Scan all GDS files, save name -> (file, position). Keep the streams handy.
- Merge all names. This requires subcell merge because we don't know hierarchy.
- possibly include a "neither" option during merge, to deal with subcells. Means: just use parent's file.
"""

File diff suppressed because it is too large

masque/ports.py (new file, 401 lines)

@ -0,0 +1,401 @@
from typing import Iterable, KeysView, ValuesView, overload, Self, Mapping
import warnings
import traceback
import logging
from collections import Counter
from abc import ABCMeta, abstractmethod
import numpy
from numpy import pi
from numpy.typing import ArrayLike, NDArray
from .traits import PositionableImpl, Rotatable, PivotableImpl, Copyable, Mirrorable
from .utils import rotate_offsets_around
from .error import PortError
logger = logging.getLogger(__name__)
class Port(PositionableImpl, Rotatable, PivotableImpl, Copyable, Mirrorable):
"""
A point at which a `Device` can be snapped to another `Device`.
Each port has an `offset` ((x, y) position) and may also have a
`rotation` (orientation) and a `ptype` (port type).
The `rotation` is an angle, in radians, measured counterclockwise
from the +x axis, pointing inwards into the device which owns the port.
The rotation may be set to `None`, indicating that any orientation is
allowed (e.g. for a DC electrical port). It is stored modulo 2pi.
The `ptype` is an arbitrary string, default of `unk` (unknown).
"""
__slots__ = (
'ptype', '_rotation',
# inherited:
'_offset',
)
_rotation: float | None
""" radians counterclockwise from +x, pointing into device body.
Can be `None` to signify undirected port """
ptype: str
""" Port types must match to be plugged together if both are non-zero """
def __init__(
self,
offset: ArrayLike,
rotation: float | None,
ptype: str = 'unk',
) -> None:
self.offset = offset
self.rotation = rotation
self.ptype = ptype
@property
def rotation(self) -> float | None:
""" Rotation, radians counterclockwise, pointing into device body. Can be None. """
return self._rotation
@rotation.setter
def rotation(self, val: float | None) -> None:
if val is None:
self._rotation = None
else:
if not numpy.size(val) == 1:
raise PortError('Rotation must be a scalar')
self._rotation = val % (2 * pi)
def get_bounds(self):
return numpy.vstack((self.offset, self.offset))
def set_ptype(self, ptype: str) -> Self:
""" Chainable setter for `ptype` """
self.ptype = ptype
return self
def mirror(self, axis: int) -> Self:
self.offset[1 - axis] *= -1
if self.rotation is not None:
self.rotation *= -1
self.rotation += axis * pi
return self
def rotate(self, rotation: float) -> Self:
if self.rotation is not None:
self.rotation += rotation
return self
def set_rotation(self, rotation: float | None) -> Self:
self.rotation = rotation
return self
def __repr__(self) -> str:
if self.rotation is None:
rot = 'any'
else:
rot = str(numpy.rad2deg(self.rotation))
return f'<{self.offset}, {rot}, [{self.ptype}]>'
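A brief sketch of constructing ports as defined above (assuming `Port` is importable from `masque.ports`, the file added here; `copy()` comes from the mixed-in `Copyable` trait):

from numpy import pi
from masque.ports import Port

opt = Port((0, 0), rotation=0.0, ptype='optical')   # directed port, facing +x into the device
dc = Port((5, 0), rotation=None)                    # undirected port (any orientation allowed)
opt2 = opt.copy().rotate(pi / 2).set_ptype('unk')   # chainable trait setters
print(opt)                                          # e.g. <[0. 0.], 0.0, [optical]>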
class PortList(metaclass=ABCMeta):
__slots__ = () # Allow subclasses to use __slots__
@property
@abstractmethod
def ports(self) -> dict[str, Port]:
""" Uniquely-named ports which can be used to snap to other Device instances"""
pass
@ports.setter
@abstractmethod
def ports(self, value: dict[str, Port]) -> None:
pass
@overload
def __getitem__(self, key: str) -> Port:
pass
@overload
def __getitem__(self, key: list[str] | tuple[str, ...] | KeysView[str] | ValuesView[str]) -> dict[str, Port]:
pass
def __getitem__(self, key: str | Iterable[str]) -> Port | dict[str, Port]:
"""
For convenience, ports can be read out using square brackets:
- `pattern['A'] == Port((0, 0), 0)`
- ```
pattern[['A', 'B']] == {
'A': Port((0, 0), 0),
'B': Port((0, 0), pi),
}
```
"""
if isinstance(key, str):
return self.ports[key]
else:
return {k: self.ports[k] for k in key}
# NOTE: Didn't add keys(), items(), values(), __contains__(), etc.
# because it's weird on stuff like Pattern that contains other lists
# and because you can just grab .ports and use that instead
def rename_ports(
self,
mapping: dict[str, str | None],
overwrite: bool = False,
) -> Self:
"""
Renames ports as specified by `mapping`.
Ports can be explicitly deleted by mapping them to `None`.
Args:
mapping: dict of `{'old_name': 'new_name'}` pairs. Names can be mapped
to `None` to perform an explicit deletion. `'new_name'` can also
overwrite an existing non-renamed port to implicitly delete it if
`overwrite` is set to `True`.
overwrite: Allows implicit deletion of ports if set to `True`; see `mapping`.
Returns:
self
"""
if not overwrite:
duplicates = (set(self.ports.keys()) - set(mapping.keys())) & set(mapping.values())
if duplicates:
raise PortError(f'Unrenamed ports would be overwritten: {duplicates}')
renamed = {mapping[k]: self.ports.pop(k) for k in mapping.keys()}
if None in renamed:
del renamed[None]
self.ports.update(renamed) # type: ignore
return self
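For example, with a hypothetical device `dev` exposing ports 'in', 'out', and 'gnd':

dev.rename_ports({'in': 'A', 'out': None})       # 'in' becomes 'A'; 'out' is deleted
dev.rename_ports({'gnd': 'A'}, overwrite=True)   # implicitly deletes the existing 'A'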
def add_port_pair(
self,
offset: ArrayLike = (0, 0),
rotation: float = 0.0,
names: tuple[str, str] = ('A', 'B'),
ptype: str = 'unk',
) -> Self:
"""
Add a pair of ports with opposing directions at the specified location.
Args:
offset: Location at which to add the ports
rotation: Orientation of the first port. Radians, counterclockwise.
Default 0.
names: Names for the two ports. Default 'A' and 'B'
ptype: Sets the port type for both ports.
Returns:
self
"""
new_ports = {
names[0]: Port(offset, rotation=rotation, ptype=ptype),
names[1]: Port(offset, rotation=rotation + pi, ptype=ptype),
}
self.check_ports(names)
self.ports.update(new_ports)
return self
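For example, adding a mated pair on a hypothetical pattern `pat`:

pat.add_port_pair(offset=(10, 0), rotation=0.0, names=('L', 'R'), ptype='optical')
# 'L' faces +x and 'R' faces -x at the same point, so either side can be plugged into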
def check_ports(
self,
other_names: Iterable[str],
map_in: dict[str, str] | None = None,
map_out: dict[str, str | None] | None = None,
) -> Self:
"""
Given the provided port mappings, check that:
- All of the ports specified in the mappings exist
- There are no duplicate port names after all the mappings are performed
Args:
other_names: List of port names being considered for inclusion into
`self.ports` (before mapping)
map_in: dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
map_out: dict of `{'old_name': 'new_name'}` mappings, specifying
new names for unconnected `other_names` ports.
Returns:
self
Raises:
`PortError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other_names`.
`PortError` if there are any duplicate names after `map_in` and `map_out`
are applied.
"""
if map_in is None:
map_in = {}
if map_out is None:
map_out = {}
other = set(other_names)
missing_inkeys = set(map_in.keys()) - set(self.ports.keys())
if missing_inkeys:
raise PortError(f'`map_in` keys not present in device: {missing_inkeys}')
missing_invals = set(map_in.values()) - other
if missing_invals:
raise PortError(f'`map_in` values not present in other device: {missing_invals}')
missing_outkeys = set(map_out.keys()) - other
if missing_outkeys:
raise PortError(f'`map_out` keys not present in other device: {missing_outkeys}')
orig_remaining = set(self.ports.keys()) - set(map_in.keys())
other_remaining = other - set(map_out.keys()) - set(map_in.values())
mapped_vals = set(map_out.values())
mapped_vals.discard(None)
conflicts_final = orig_remaining & (other_remaining | mapped_vals)
if conflicts_final:
raise PortError(f'Device ports conflict with existing ports: {conflicts_final}')
conflicts_partial = other_remaining & mapped_vals
if conflicts_partial:
raise PortError(f'`map_out` targets conflict with non-mapped outputs: {conflicts_partial}')
map_out_counts = Counter(map_out.values())
map_out_counts[None] = 0
conflicts_out = {k for k, v in map_out_counts.items() if v > 1}
if conflicts_out:
raise PortError(f'Duplicate targets in `map_out`: {conflicts_out}')
return self
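For instance, validating a hypothetical plug of device `wg` into `dev` before performing it:

dev.check_ports(
    other_names=wg.ports.keys(),
    map_in={'A': 'in'},                  # dev's 'A' will connect to wg's 'in'
    map_out={'out': 'B', 'tap': None},   # rename wg's unconnected 'out' to 'B', drop 'tap'
)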
def find_transform(
self,
other: 'PortList',
map_in: dict[str, str],
*,
mirrored: tuple[bool, bool] = (False, False),
set_rotation: float | None = None,
) -> tuple[NDArray[numpy.float64], float, NDArray[numpy.float64]]:
"""
Given a device `other` and a mapping `map_in` specifying port connections,
find the transform which will correctly align the specified ports.
Args:
other: a device
map_in: dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
mirrored: Mirrors `other` across the x or y axes prior to
connecting any ports.
set_rotation: If the necessary rotation cannot be determined from
the ports being connected (i.e. all pairs have at least one
port with `rotation=None`), `set_rotation` must be provided
to indicate how much `other` should be rotated. Otherwise,
`set_rotation` must remain `None`.
Returns:
- The (x, y) translation (performed last)
- The rotation (radians, counterclockwise)
- The (x, y) pivot point for the rotation
The rotation should be performed before the translation.
"""
s_ports = self[map_in.keys()]
o_ports = other[map_in.values()]
return self.find_ptransform(
s_ports=s_ports,
o_ports=o_ports,
map_in=map_in,
mirrored=mirrored,
set_rotation=set_rotation,
)
@staticmethod
def find_ptransform( # TODO needs better name
s_ports: Mapping[str, Port],
o_ports: Mapping[str, Port],
map_in: dict[str, str],
*,
mirrored: tuple[bool, bool] = (False, False),
set_rotation: float | None = None,
) -> tuple[NDArray[numpy.float64], float, NDArray[numpy.float64]]:
"""
Given two sets of ports (s_ports and o_ports) and a mapping `map_in`
specifying port connections, find the transform which will correctly
align the specified o_ports onto their respective s_ports.
Args:
s_ports: Mapping of `{name: port}` for the stationary ports
o_ports: Mapping of `{name: port}` for the ports which are to be moved/mirrored.
map_in: dict of `{'s_port': 'o_port'}` mappings, specifying
port connections.
mirrored: Mirrors `o_ports` across the x or y axes prior to
connecting any ports.
set_rotation: If the necessary rotation cannot be determined from
the ports being connected (i.e. all pairs have at least one
port with `rotation=None`), `set_rotation` must be provided
to indicate how much `o_ports` should be rotated. Otherwise,
`set_rotation` must remain `None`.
Returns:
- The (x, y) translation (performed last)
- The rotation (radians, counterclockwise)
- The (x, y) pivot point for the rotation
The rotation should be performed before the translation.
"""
s_offsets = numpy.array([p.offset for p in s_ports.values()])
o_offsets = numpy.array([p.offset for p in o_ports.values()])
s_types = [p.ptype for p in s_ports.values()]
o_types = [p.ptype for p in o_ports.values()]
s_rotations = numpy.array([p.rotation if p.rotation is not None else 0 for p in s_ports.values()])
o_rotations = numpy.array([p.rotation if p.rotation is not None else 0 for p in o_ports.values()])
s_has_rot = numpy.array([p.rotation is not None for p in s_ports.values()], dtype=bool)
o_has_rot = numpy.array([p.rotation is not None for p in o_ports.values()], dtype=bool)
has_rot = s_has_rot & o_has_rot
if mirrored[0]:
o_offsets[:, 1] *= -1
o_rotations *= -1
if mirrored[1]:
o_offsets[:, 0] *= -1
o_rotations *= -1
o_rotations += pi
type_conflicts = numpy.array([st != ot and st != 'unk' and ot != 'unk'
for st, ot in zip(s_types, o_types)])
if type_conflicts.any():
msg = 'Ports have conflicting types:\n'
for nn, (k, v) in enumerate(map_in.items()):
if type_conflicts[nn]:
msg += f'{k} | {s_types[nn]}:{o_types[nn]} | {v}\n'
msg = ''.join(traceback.format_stack()) + '\n' + msg
warnings.warn(msg, stacklevel=2)
rotations = numpy.mod(s_rotations - o_rotations - pi, 2 * pi)
if not has_rot.any():
if set_rotation is None:
raise PortError('Must provide set_rotation if rotation is indeterminate')
rotations[:] = set_rotation
else:
rotations[~has_rot] = rotations[has_rot][0]
if not numpy.allclose(rotations[:1], rotations):
rot_deg = numpy.rad2deg(rotations)
msg = 'Port orientations do not match:\n'
for nn, (k, v) in enumerate(map_in.items()):
msg += f'{k} | {rot_deg[nn]:g} | {v}\n'
raise PortError(msg)
pivot = o_offsets[0].copy()
rotate_offsets_around(o_offsets, pivot, rotations[0])
translations = s_offsets - o_offsets
if not numpy.allclose(translations[:1], translations):
msg = 'Port translations do not match:\n'
for nn, (k, v) in enumerate(map_in.items()):
msg += f'{k} | {translations[nn]} | {v}\n'
raise PortError(msg)
return translations[0], rotations[0], o_offsets[0]
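A minimal numeric check of the transform math above (a sketch assuming the module layout shown in this diff, i.e. `masque/ports.py`):

import numpy
from numpy import pi
from masque.ports import Port, PortList

s = Port((10, 0), rotation=0.0)   # stationary port, facing +x
o = Port((0, 0), rotation=0.0)    # port on the device to be moved, also facing +x
translation, rotation, pivot = PortList.find_ptransform(
    s_ports={'A': s}, o_ports={'B': o}, map_in={'A': 'B'},
)
assert numpy.isclose(rotation, pi)            # the moved port must turn to face -x
assert numpy.allclose(translation, (10, 0))   # then slide onto the stationary port
assert numpy.allclose(pivot, (0, 0))          # rotation is taken about the moved port's offset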

masque/ref.py (new file, 209 lines)

@ -0,0 +1,209 @@
"""
Ref provides basic support for nesting Pattern objects within each other, by adding
offset, rotation, scaling, and other such properties to the reference.
"""
#TODO more top-level documentation
from typing import Sequence, Mapping, TYPE_CHECKING, Any, Self
import copy
import numpy
from numpy import pi
from numpy.typing import NDArray, ArrayLike
from .error import PatternError
from .utils import is_scalar, annotations_t
from .repetition import Repetition
from .traits import (
PositionableImpl, RotatableImpl, ScalableImpl,
Mirrorable, PivotableImpl, Copyable, RepeatableImpl, AnnotatableImpl,
)
if TYPE_CHECKING:
from . import Pattern
class Ref(
PositionableImpl, RotatableImpl, ScalableImpl, Mirrorable,
PivotableImpl, Copyable, RepeatableImpl, AnnotatableImpl,
):
"""
`Ref` provides basic support for nesting Pattern objects within each other, by adding
offset, rotation, scaling, and associated methods.
"""
__slots__ = (
'_target', '_mirrored',
# inherited
'_offset', '_rotation', 'scale', '_repetition', '_annotations',
)
_target: str | None
""" The name of the `Pattern` being instanced """
_mirrored: NDArray[numpy.bool_]
""" Whether to mirror the instance across the x and/or y axes. """
def __init__(
self,
target: str | None,
*,
offset: ArrayLike = (0.0, 0.0),
rotation: float = 0.0,
mirrored: Sequence[bool] | None = None,
scale: float = 1.0,
repetition: Repetition | None = None,
annotations: annotations_t | None = None,
) -> None:
"""
Args:
target: Name of the Pattern to reference.
offset: (x, y) offset applied to the referenced pattern. Not affected by rotation etc.
rotation: Rotation (radians, counterclockwise) relative to the referenced pattern's (0, 0).
mirrored: Whether to mirror the referenced pattern across its x and y axes.
scale: Scaling factor applied to the pattern's geometry.
repetition: `Repetition` object, default `None`
annotations: Annotation dict (`annotations_t`), default empty
"""
self.target = target
self.offset = offset
self.rotation = rotation
self.scale = scale
if mirrored is None:
mirrored = (False, False)
self.mirrored = mirrored
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
def __copy__(self) -> 'Ref':
new = Ref(
target=self.target,
offset=self.offset.copy(),
rotation=self.rotation,
scale=self.scale,
mirrored=self.mirrored.copy(),
repetition=copy.deepcopy(self.repetition),
annotations=copy.deepcopy(self.annotations),
)
return new
def __deepcopy__(self, memo: dict | None = None) -> 'Ref':
memo = {} if memo is None else memo
new = copy.copy(self)
new.repetition = copy.deepcopy(self.repetition, memo)
new.annotations = copy.deepcopy(self.annotations, memo)
return new
# target property
@property
def target(self) -> str | None:
return self._target
@target.setter
def target(self, val: str | None) -> None:
if val is not None and not isinstance(val, str):
raise PatternError(f'Provided target {val} is not a str or None!')
self._target = val
# Mirrored property
@property
def mirrored(self) -> Any: # TODO mypy#3004 NDArray[numpy.bool_]:
return self._mirrored
@mirrored.setter
def mirrored(self, val: ArrayLike) -> None:
if is_scalar(val):
raise PatternError('Mirrored must be a 2-element list of booleans')
self._mirrored = numpy.array(val, dtype=bool, copy=True)
def as_pattern(
self,
*,
pattern: 'Pattern | None' = None,
library: Mapping[str, 'Pattern'] | None = None,
) -> 'Pattern':
"""
Args:
pattern: Pattern object to transform
library: A str->Pattern mapping, used instead of `pattern`. Must contain
`self.target`.
Returns:
A copy of the referenced Pattern which has been scaled, rotated, etc.
according to this `Ref`'s properties.
"""
if pattern is None:
if library is None:
raise PatternError('as_pattern() must be given a pattern or library.')
assert self.target is not None
pattern = library[self.target]
pattern = pattern.deepcopy()
if self.scale != 1:
pattern.scale_by(self.scale)
if numpy.any(self.mirrored):
pattern.mirror2d(self.mirrored)
if self.rotation % (2 * pi) != 0:
pattern.rotate_around((0.0, 0.0), self.rotation)
if numpy.any(self.offset):
pattern.translate_elements(self.offset)
if self.repetition is not None:
combined = type(pattern)()
for dd in self.repetition.displacements:
temp_pat = pattern.deepcopy()
temp_pat.ports = {}
temp_pat.translate_elements(dd)
combined.append(temp_pat)
pattern = combined
return pattern
def rotate(self, rotation: float) -> Self:
self.rotation += rotation
if self.repetition is not None:
self.repetition.rotate(rotation)
return self
def mirror(self, axis: int) -> Self:
self.mirrored[axis] = not self.mirrored[axis]
self.rotation *= -1
if self.repetition is not None:
self.repetition.mirror(axis)
return self
def get_bounds(
self,
*,
pattern: 'Pattern | None' = None,
library: Mapping[str, 'Pattern'] | None = None,
) -> NDArray[numpy.float64] | None:
"""
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `Ref` in each dimension.
Returns `None` if the contained `Pattern` is empty.
Args:
pattern: Pattern object to use directly (if not using `library`)
library: Name-to-Pattern mapping, used to look up `self.target`
Returns:
`[[x_min, y_min], [x_max, y_max]]` or `None`
"""
if pattern is None and library is None:
raise PatternError('get_bounds() must be given a pattern or library.')
if pattern is None and self.target is None:
return None
if library is not None and self.target not in library:
raise PatternError(f'get_bounds() called on dangling reference to "{self.target}"')
if pattern is not None and pattern.is_empty():
# no need to run as_pattern()
return None
return self.as_pattern(pattern=pattern, library=library).get_bounds(library)
def __repr__(self) -> str:
name = f'"{self.target}"' if self.target is not None else None
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
scale = f' d{self.scale:g}' if self.scale != 1 else ''
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
return f'<Ref {name} at {self.offset}{rotation}{scale}{mirrored}>'
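A usage sketch for `Ref.as_pattern()` (the cell name 'wg' and the pattern `wg_pattern` are hypothetical; the library is just a plain dict):

from numpy import pi
from masque.ref import Ref

lib = {'wg': wg_pattern}                                       # wg_pattern: some existing Pattern
ref = Ref('wg', offset=(100, 0), rotation=pi / 2, scale=2.0)
flat = ref.as_pattern(library=lib)   # deep copy of 'wg', scaled, rotated about (0, 0), then offset
print(ref)                           # e.g. <Ref "wg" at [100.   0.] r90 d2>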

View File

@ -3,23 +3,23 @@
instances of an object . instances of an object .
""" """
from typing import Union, Dict, Optional, Sequence, Any, Type from typing import Any, Type
import copy import copy
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import numpy import numpy
from numpy.typing import ArrayLike, NDArray from numpy.typing import ArrayLike, NDArray
from .traits import Copyable, Scalable, Rotatable, Mirrorable
from .error import PatternError from .error import PatternError
from .utils import rotation_matrix_2d, AutoSlots from .utils import rotation_matrix_2d
from .traits import LockableImpl, Copyable, Scalable, Rotatable, Mirrorable
class Repetition(Copyable, Rotatable, Mirrorable, Scalable, metaclass=ABCMeta): class Repetition(Copyable, Rotatable, Mirrorable, Scalable, metaclass=ABCMeta):
""" """
Interface common to all objects which specify repetitions Interface common to all objects which specify repetitions
""" """
__slots__ = () __slots__ = () # Allow subclasses to use __slots__
@property @property
@abstractmethod @abstractmethod
@ -30,7 +30,7 @@ class Repetition(Copyable, Rotatable, Mirrorable, Scalable, metaclass=ABCMeta):
pass pass
class Grid(LockableImpl, Repetition, metaclass=AutoSlots): class Grid(Repetition):
""" """
`Grid` describes a 2D grid formed by two basis vectors and two 'counts' (sizes). `Grid` describes a 2D grid formed by two basis vectors and two 'counts' (sizes).
@ -39,10 +39,10 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
Note that the offsets in either the 2D or 1D grids do not have to be axis-aligned. Note that the offsets in either the 2D or 1D grids do not have to be axis-aligned.
""" """
__slots__ = ('_a_vector', __slots__ = (
'_b_vector', '_a_vector', '_b_vector',
'_a_count', '_a_count', '_b_count',
'_b_count') )
_a_vector: NDArray[numpy.float64] _a_vector: NDArray[numpy.float64]
""" Vector `[x, y]` specifying the first lattice vector of the grid. """ Vector `[x, y]` specifying the first lattice vector of the grid.
@ -52,7 +52,7 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
_a_count: int _a_count: int
""" Number of instances along the direction specified by the `a_vector` """ """ Number of instances along the direction specified by the `a_vector` """
_b_vector: Optional[NDArray[numpy.float64]] _b_vector: NDArray[numpy.float64] | None
""" Vector `[x, y]` specifying a second lattice vector for the grid. """ Vector `[x, y]` specifying a second lattice vector for the grid.
Specifies center-to-center spacing between adjacent elements. Specifies center-to-center spacing between adjacent elements.
Can be `None` for a 1D array. Can be `None` for a 1D array.
@ -65,9 +65,8 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
self, self,
a_vector: ArrayLike, a_vector: ArrayLike,
a_count: int, a_count: int,
b_vector: Optional[ArrayLike] = None, b_vector: ArrayLike | None = None,
b_count: Optional[int] = 1, b_count: int | None = 1,
locked: bool = False,
) -> None: ) -> None:
""" """
Args: Args:
@ -79,7 +78,6 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
Can be omitted when specifying a 1D array. Can be omitted when specifying a 1D array.
b_count: Number of elements in the `b_vector` direction. b_count: Number of elements in the `b_vector` direction.
Should be omitted if `b_vector` was omitted. Should be omitted if `b_vector` was omitted.
locked: Whether the `Grid` is locked after initialization.
Raises: Raises:
PatternError if `b_*` inputs conflict with each other PatternError if `b_*` inputs conflict with each other
@ -99,12 +97,10 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
if b_count < 1: if b_count < 1:
raise PatternError(f'Repetition has too-small b_count: {b_count}') raise PatternError(f'Repetition has too-small b_count: {b_count}')
object.__setattr__(self, 'locked', False)
self.a_vector = a_vector # type: ignore # setter handles type conversion self.a_vector = a_vector # type: ignore # setter handles type conversion
self.b_vector = b_vector # type: ignore # setter handles type conversion self.b_vector = b_vector # type: ignore # setter handles type conversion
self.a_count = a_count self.a_count = a_count
self.b_count = b_count self.b_count = b_count
self.locked = locked
@classmethod @classmethod
def aligned( def aligned(
@ -129,18 +125,17 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
return cls(a_vector=(x, 0), b_vector=(0, y), a_count=x_count, b_count=y_count) return cls(a_vector=(x, 0), b_vector=(0, y), a_count=x_count, b_count=y_count)
def __copy__(self) -> 'Grid': def __copy__(self) -> 'Grid':
new = Grid(a_vector=self.a_vector.copy(), new = Grid(
a_vector=self.a_vector.copy(),
b_vector=copy.copy(self.b_vector), b_vector=copy.copy(self.b_vector),
a_count=self.a_count, a_count=self.a_count,
b_count=self.b_count, b_count=self.b_count,
locked=self.locked) )
return new return new
def __deepcopy__(self, memo: Dict = None) -> 'Grid': def __deepcopy__(self, memo: dict | None = None) -> 'Grid':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
LocakbleImpl.unlock(new)
new.locked = self.locked
return new return new
# a_vector property # a_vector property
@ -159,7 +154,7 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
# b_vector property # b_vector property
@property @property
def b_vector(self) -> Optional[NDArray[numpy.float64]]: def b_vector(self) -> NDArray[numpy.float64] | None:
return self._b_vector return self._b_vector
@b_vector.setter @b_vector.setter
@ -233,7 +228,7 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
self.b_vector[1 - axis] *= -1 self.b_vector[1 - axis] *= -1
return self return self
def get_bounds(self) -> Optional[NDArray[numpy.float64]]: def get_bounds(self) -> NDArray[numpy.float64] | None:
""" """
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `Grid` in each dimension. extent of the `Grid` in each dimension.
@ -242,9 +237,9 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
`[[x_min, y_min], [x_max, y_max]]` or `None` `[[x_min, y_min], [x_max, y_max]]` or `None`
""" """
a_extent = self.a_vector * self.a_count a_extent = self.a_vector * self.a_count
b_extent = self.b_vector * self.b_count if (self.b_vector is not None) else 0 # type: Union[NDArray[numpy.float64], float] b_extent = self.b_vector * self.b_count if (self.b_vector is not None) else 0 # type: NDArray[numpy.float64] | float
corners = ((0, 0), a_extent, b_extent, a_extent + b_extent) corners = numpy.stack(((0, 0), a_extent, b_extent, a_extent + b_extent))
xy_min = numpy.min(corners, axis=0) xy_min = numpy.min(corners, axis=0)
xy_max = numpy.max(corners, axis=0) xy_max = numpy.max(corners, axis=0)
return numpy.array((xy_min, xy_max)) return numpy.array((xy_min, xy_max))
@ -264,36 +259,9 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
self.b_vector *= c self.b_vector *= c
return self return self
def lock(self) -> 'Grid':
"""
Lock the `Grid`, disallowing changes.
Returns:
self
"""
self.a_vector.flags.writeable = False
if self.b_vector is not None:
self.b_vector.flags.writeable = False
LockableImpl.lock(self)
return self
def unlock(self) -> 'Grid':
"""
Unlock the `Grid`
Returns:
self
"""
self.a_vector.flags.writeable = True
if self.b_vector is not None:
self.b_vector.flags.writeable = True
LockableImpl.unlock(self)
return self
def __repr__(self) -> str: def __repr__(self) -> str:
locked = ' L' if self.locked else ''
bv = f', {self.b_vector}' if self.b_vector is not None else '' bv = f', {self.b_vector}' if self.b_vector is not None else ''
return (f'<Grid {self.a_count}x{self.b_count} ({self.a_vector}{bv}){locked}>') return (f'<Grid {self.a_count}x{self.b_count} ({self.a_vector}{bv})>')
def __eq__(self, other: Any) -> bool: def __eq__(self, other: Any) -> bool:
if not isinstance(other, type(self)): if not isinstance(other, type(self)):
@ -308,12 +276,10 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
return False return False
if any(self.b_vector[ii] != other.b_vector[ii] for ii in range(2)): if any(self.b_vector[ii] != other.b_vector[ii] for ii in range(2)):
return False return False
if self.locked != other.locked:
return False
return True return True
class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots): class Arbitrary(Repetition):
""" """
`Arbitrary` is a simple list of (absolute) displacements for instances. `Arbitrary` is a simple list of (absolute) displacements for instances.
@ -342,48 +308,19 @@ class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots):
def __init__( def __init__(
self, self,
displacements: ArrayLike, displacements: ArrayLike,
locked: bool = False,
) -> None: ) -> None:
""" """
Args: Args:
displacements: List of vectors (Nx2 ndarray) specifying displacements. displacements: List of vectors (Nx2 ndarray) specifying displacements.
locked: Whether the object is locked after initialization.
""" """
object.__setattr__(self, 'locked', False)
self.displacements = displacements self.displacements = displacements
self.locked = locked
def lock(self) -> 'Arbitrary':
"""
Lock the object, disallowing changes.
Returns:
self
"""
self._displacements.flags.writeable = False
LockableImpl.lock(self)
return self
def unlock(self) -> 'Arbitrary':
"""
Unlock the object
Returns:
self
"""
self._displacements.flags.writeable = True
LockableImpl.unlock(self)
return self
def __repr__(self) -> str: def __repr__(self) -> str:
locked = ' L' if self.locked else '' return (f'<Arbitrary {len(self.displacements)}pts >')
return (f'<Arbitrary {len(self.displacements)}pts {locked}>')
def __eq__(self, other: Any) -> bool: def __eq__(self, other: Any) -> bool:
if not isinstance(other, type(self)): if not isinstance(other, type(self)):
return False return False
if self.locked != other.locked:
return False
return numpy.array_equal(self.displacements, other.displacements) return numpy.array_equal(self.displacements, other.displacements)
def rotate(self, rotation: float) -> 'Arbitrary': def rotate(self, rotation: float) -> 'Arbitrary':
@ -413,7 +350,7 @@ class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots):
self.displacements[1 - axis] *= -1 self.displacements[1 - axis] *= -1
return self return self
def get_bounds(self) -> Optional[NDArray[numpy.float64]]: def get_bounds(self) -> NDArray[numpy.float64] | None:
""" """
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `displacements` in each dimension. extent of the `displacements` in each dimension.
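The repetition classes above can be used roughly as follows (a sketch assuming `Grid` and `Arbitrary` live in `masque.repetition`, as the imports elsewhere in this diff suggest):

from masque.repetition import Grid, Arbitrary

grid = Grid(a_vector=(10, 0), a_count=5, b_vector=(0, 20), b_count=3)   # 5 x 3 lattice
row = Grid(a_vector=(10, 0), a_count=5)                                 # 1D row; b_vector omitted
arb = Arbitrary([(0, 0), (3, 7), (50, 50)])                             # explicit displacements
print(grid.get_bounds())   # bounds are [[0, 0], [50, 60]]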

View File

@ -3,7 +3,7 @@ Shapes for use with the Pattern class, as well as the Shape abstract class from
which they are derived. which they are derived.
""" """
from .shape import Shape, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from .shape import Shape, normalized_shape_tuple, DEFAULT_POLY_NUM_VERTICES
from .polygon import Polygon from .polygon import Polygon
from .circle import Circle from .circle import Circle

View File

@ -1,4 +1,4 @@
from typing import List, Dict, Optional, Sequence, Any from typing import Sequence, Any
import copy import copy
import math import math
@ -6,14 +6,13 @@ import numpy
from numpy import pi from numpy import pi
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_VERTICES
from .. import PatternError from ..error import PatternError
from ..repetition import Repetition from ..repetition import Repetition
from ..utils import is_scalar, layer_t, AutoSlots, annotations_t from ..utils import is_scalar, layer_t, annotations_t
from ..traits import LockableImpl
class Arc(Shape, metaclass=AutoSlots): class Arc(Shape):
""" """
An elliptical arc, formed by cutting off an elliptical ring with two rays which exit from its An elliptical arc, formed by cutting off an elliptical ring with two rays which exit from its
center. It has a position, two radii, a start and stop angle, a rotation, and a width. center. It has a position, two radii, a start and stop angle, a rotation, and a width.
@ -22,8 +21,11 @@ class Arc(Shape, metaclass=AutoSlots):
The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius. The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius.
The start and stop angle are measured counterclockwise from the first (x) radius. The start and stop angle are measured counterclockwise from the first (x) radius.
""" """
__slots__ = ('_radii', '_angles', '_width', '_rotation', __slots__ = (
'poly_num_points', 'poly_max_arclen') '_radii', '_angles', '_width', '_rotation',
# Inherited
'_offset', '_layer', '_repetition', '_annotations',
)
_radii: NDArray[numpy.float64] _radii: NDArray[numpy.float64]
""" Two radii for defining an ellipse """ """ Two radii for defining an ellipse """
@ -37,12 +39,6 @@ class Arc(Shape, metaclass=AutoSlots):
_width: float _width: float
""" Width of the arc """ """ Width of the arc """
poly_num_points: Optional[int]
""" Sets the default number of points for `.polygonize()` """
poly_max_arclen: Optional[float]
""" Sets the default max segement length for `.polygonize()` """
# radius properties # radius properties
@property @property
def radii(self) -> Any: # TODO mypy#3004 NDArray[numpy.float64]: def radii(self) -> Any: # TODO mypy#3004 NDArray[numpy.float64]:
@ -157,24 +153,18 @@ class Arc(Shape, metaclass=AutoSlots):
angles: ArrayLike, angles: ArrayLike,
width: float, width: float,
*, *,
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
poly_max_arclen: Optional[float] = None,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
rotation: float = 0, rotation: float = 0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None, annotations: annotations_t | None = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False, raw: bool = False,
) -> None: ) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw: if raw:
assert(isinstance(radii, numpy.ndarray)) assert isinstance(radii, numpy.ndarray)
assert(isinstance(angles, numpy.ndarray)) assert isinstance(angles, numpy.ndarray)
assert(isinstance(offset, numpy.ndarray)) assert isinstance(offset, numpy.ndarray)
self._radii = radii self._radii = radii
self._angles = angles self._angles = angles
self._width = width self._width = width
@ -183,7 +173,6 @@ class Arc(Shape, metaclass=AutoSlots):
self._repetition = repetition self._repetition = repetition
self._annotations = annotations if annotations is not None else {} self._annotations = annotations if annotations is not None else {}
self._layer = layer self._layer = layer
self._dose = dose
else: else:
self.radii = radii self.radii = radii
self.angles = angles self.angles = angles
@ -193,34 +182,23 @@ class Arc(Shape, metaclass=AutoSlots):
self.repetition = repetition self.repetition = repetition
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.layer = layer self.layer = layer
self.dose = dose
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Arc': def __deepcopy__(self, memo: dict | None = None) -> 'Arc':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._radii = self._radii.copy() new._radii = self._radii.copy()
new._angles = self._angles.copy() new._angles = self._angles.copy()
new._annotations = copy.deepcopy(self._annotations) new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons( def to_polygons(
self, self,
poly_num_points: Optional[int] = None, num_vertices: int | None = DEFAULT_POLY_NUM_VERTICES,
poly_max_arclen: Optional[float] = None, max_arclen: float | None = None,
) -> List[Polygon]: ) -> list[Polygon]:
if poly_num_points is None: if (num_vertices is None) and (max_arclen is None):
poly_num_points = self.poly_num_points
if poly_max_arclen is None:
poly_max_arclen = self.poly_max_arclen
if (poly_num_points is None) and (poly_max_arclen is None):
raise PatternError('Max number of points and arclength left unspecified' raise PatternError('Max number of points and arclength left unspecified'
+ ' (default was also overridden)') + ' (default was also overridden)')
@ -238,18 +216,18 @@ class Arc(Shape, metaclass=AutoSlots):
perimeter = abs(a0 - a1) / (2 * pi) * ellipse_perimeter # TODO: make this more accurate perimeter = abs(a0 - a1) / (2 * pi) * ellipse_perimeter # TODO: make this more accurate
n = [] n = []
if poly_num_points is not None: if num_vertices is not None:
n += [poly_num_points] n += [num_vertices]
if poly_max_arclen is not None: if max_arclen is not None:
n += [perimeter / poly_max_arclen] n += [perimeter / max_arclen]
num_points = int(round(max(n))) num_vertices = int(round(max(n)))
wh = self.width / 2.0 wh = self.width / 2.0
if wh == r0 or wh == r1: if wh == r0 or wh == r1:
thetas_inner = numpy.zeros(1) # Don't generate multiple vertices if we're at the origin thetas_inner = numpy.zeros(1) # Don't generate multiple vertices if we're at the origin
else: else:
thetas_inner = numpy.linspace(a_ranges[0][1], a_ranges[0][0], num_points, endpoint=True) thetas_inner = numpy.linspace(a_ranges[0][1], a_ranges[0][0], num_vertices, endpoint=True)
thetas_outer = numpy.linspace(a_ranges[1][0], a_ranges[1][1], num_points, endpoint=True) thetas_outer = numpy.linspace(a_ranges[1][0], a_ranges[1][1], num_vertices, endpoint=True)
sin_th_i, cos_th_i = (numpy.sin(thetas_inner), numpy.cos(thetas_inner)) sin_th_i, cos_th_i = (numpy.sin(thetas_inner), numpy.cos(thetas_inner))
sin_th_o, cos_th_o = (numpy.sin(thetas_outer), numpy.cos(thetas_outer)) sin_th_o, cos_th_o = (numpy.sin(thetas_outer), numpy.cos(thetas_outer))
@ -263,7 +241,7 @@ class Arc(Shape, metaclass=AutoSlots):
ys = numpy.hstack((ys1, ys2)) ys = numpy.hstack((ys1, ys2))
xys = numpy.vstack((xs, ys)).T xys = numpy.vstack((xs, ys)).T
poly = Polygon(xys, dose=self.dose, layer=self.layer, offset=self.offset, rotation=self.rotation) poly = Polygon(xys, layer=self.layer, offset=self.offset, rotation=self.rotation)
return [poly] return [poly]
def get_bounds(self) -> NDArray[numpy.float64]: def get_bounds(self) -> NDArray[numpy.float64]:
@ -375,8 +353,13 @@ class Arc(Shape, metaclass=AutoSlots):
width = self.width width = self.width
return ((type(self), radii, angles, width / norm_value, self.layer), return ((type(self), radii, angles, width / norm_value, self.layer),
(self.offset, scale / norm_value, rotation, False, self.dose), (self.offset, scale / norm_value, rotation, False),
lambda: Arc(radii=radii * norm_value, angles=angles, width=width * norm_value, layer=self.layer)) lambda: Arc(
radii=radii * norm_value,
angles=angles,
width=width * norm_value,
layer=self.layer,
))
def get_cap_edges(self) -> NDArray[numpy.float64]: def get_cap_edges(self) -> NDArray[numpy.float64]:
''' '''
@ -429,21 +412,7 @@ class Arc(Shape, metaclass=AutoSlots):
a.append((a0, a1)) a.append((a0, a1))
return numpy.array(a) return numpy.array(a)
def lock(self) -> 'Arc':
self.radii.flags.writeable = False
self.angles.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Arc':
Shape.unlock(self)
self.radii.flags.writeable = True
self.angles.flags.writeable = True
return self
def __repr__(self) -> str: def __repr__(self) -> str:
angles = f'{numpy.rad2deg(self.angles)}' angles = f'{numpy.rad2deg(self.angles)}'
rotation = f'{numpy.rad2deg(self.rotation):g}' if self.rotation != 0 else '' rotation = f'{numpy.rad2deg(self.rotation):g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else '' return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}>'
locked = ' L' if self.locked else ''
return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>'
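A polygonization sketch for `Arc` under the new `to_polygons()` signature (assuming `Arc` is re-exported from `masque.shapes` like `Circle` and `Polygon` are):

from numpy import pi
from masque.shapes import Arc

arc = Arc(radii=(10, 5), angles=(0, pi / 2), width=1.0, layer=0)
polys = arc.to_polygons(num_vertices=400)   # or pass max_arclen=... to bound segment length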

View File

@ -1,32 +1,28 @@
from typing import List, Dict, Optional
import copy import copy
import numpy import numpy
from numpy import pi from numpy import pi
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_VERTICES
from .. import PatternError from ..error import PatternError
from ..repetition import Repetition from ..repetition import Repetition
from ..utils import is_scalar, layer_t, AutoSlots, annotations_t from ..utils import is_scalar, layer_t, annotations_t
from ..traits import LockableImpl
class Circle(Shape, metaclass=AutoSlots): class Circle(Shape):
""" """
A circle, which has a position and radius. A circle, which has a position and radius.
""" """
__slots__ = ('_radius', 'poly_num_points', 'poly_max_arclen') __slots__ = (
'_radius',
# Inherited
'_offset', '_layer', '_repetition', '_annotations',
)
_radius: float _radius: float
""" Circle radius """ """ Circle radius """
poly_num_points: Optional[int]
""" Sets the default number of points for `.polygonize()` """
poly_max_arclen: Optional[float]
""" Sets the default max segement length for `.polygonize()` """
# radius property # radius property
@property @property
def radius(self) -> float: def radius(self) -> float:
@ -47,72 +43,54 @@ class Circle(Shape, metaclass=AutoSlots):
self, self,
radius: float, radius: float,
*, *,
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
poly_max_arclen: Optional[float] = None,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None, annotations: annotations_t | None = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False, raw: bool = False,
) -> None: ) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw: if raw:
assert(isinstance(offset, numpy.ndarray)) assert isinstance(offset, numpy.ndarray)
self._radius = radius self._radius = radius
self._offset = offset self._offset = offset
self._repetition = repetition self._repetition = repetition
self._annotations = annotations if annotations is not None else {} self._annotations = annotations if annotations is not None else {}
self._layer = layer self._layer = layer
self._dose = dose
else: else:
self.radius = radius self.radius = radius
self.offset = offset self.offset = offset
self.repetition = repetition self.repetition = repetition
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.layer = layer self.layer = layer
self.dose = dose
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Circle': def __deepcopy__(self, memo: dict | None = None) -> 'Circle':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._annotations = copy.deepcopy(self._annotations) new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons( def to_polygons(
self, self,
poly_num_points: Optional[int] = None, num_vertices: int | None = DEFAULT_POLY_NUM_VERTICES,
poly_max_arclen: Optional[float] = None, max_arclen: float | None = None,
) -> List[Polygon]: ) -> list[Polygon]:
if poly_num_points is None: if (num_vertices is None) and (max_arclen is None):
poly_num_points = self.poly_num_points
if poly_max_arclen is None:
poly_max_arclen = self.poly_max_arclen
if (poly_num_points is None) and (poly_max_arclen is None):
raise PatternError('Number of points and arclength left ' raise PatternError('Number of points and arclength left '
'unspecified (default was also overridden)') 'unspecified (default was also overridden)')
n: List[float] = [] n: list[float] = []
if poly_num_points is not None: if num_vertices is not None:
n += [poly_num_points] n += [num_vertices]
if poly_max_arclen is not None: if max_arclen is not None:
n += [2 * pi * self.radius / poly_max_arclen] n += [2 * pi * self.radius / max_arclen]
num_points = int(round(max(n))) num_vertices = int(round(max(n)))
thetas = numpy.linspace(2 * pi, 0, num_points, endpoint=False) thetas = numpy.linspace(2 * pi, 0, num_vertices, endpoint=False)
xs = numpy.cos(thetas) * self.radius xs = numpy.cos(thetas) * self.radius
ys = numpy.sin(thetas) * self.radius ys = numpy.sin(thetas) * self.radius
xys = numpy.vstack((xs, ys)).T xys = numpy.vstack((xs, ys)).T
return [Polygon(xys, offset=self.offset, dose=self.dose, layer=self.layer)] return [Polygon(xys, offset=self.offset, layer=self.layer)]
def get_bounds(self) -> NDArray[numpy.float64]: def get_bounds(self) -> NDArray[numpy.float64]:
return numpy.vstack((self.offset - self.radius, return numpy.vstack((self.offset - self.radius,
@ -133,10 +111,8 @@ class Circle(Shape, metaclass=AutoSlots):
rotation = 0.0 rotation = 0.0
magnitude = self.radius / norm_value magnitude = self.radius / norm_value
return ((type(self), self.layer), return ((type(self), self.layer),
(self.offset, magnitude, rotation, False, self.dose), (self.offset, magnitude, rotation, False),
lambda: Circle(radius=norm_value, layer=self.layer)) lambda: Circle(radius=norm_value, layer=self.layer))
def __repr__(self) -> str: def __repr__(self) -> str:
dose = f' d{self.dose:g}' if self.dose != 1 else '' return f'<Circle l{self.layer} o{self.offset} r{self.radius:g}>'
locked = ' L' if self.locked else ''
return f'<Circle l{self.layer} o{self.offset} r{self.radius:g}{dose}{locked}>'
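And the equivalent for `Circle` (imported via `masque.shapes`, which the `__init__.py` hunk above shows re-exporting it):

from masque.shapes import Circle

c = Circle(radius=5.0, offset=(2.0, 3.0), layer=0)
polys = c.to_polygons(num_vertices=100)   # a single Polygon approximating the circle
print(c)                                  # e.g. <Circle l0 o[2. 3.] r5>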

View File

@ -1,4 +1,4 @@
from typing import List, Dict, Sequence, Optional, Any from typing import Sequence, Any
import copy import copy
import math import math
@ -6,20 +6,22 @@ import numpy
from numpy import pi from numpy import pi
from numpy.typing import ArrayLike, NDArray from numpy.typing import ArrayLike, NDArray
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_VERTICES
from .. import PatternError from ..error import PatternError
from ..repetition import Repetition from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, layer_t, AutoSlots, annotations_t from ..utils import is_scalar, rotation_matrix_2d, layer_t, annotations_t
from ..traits import LockableImpl
class Ellipse(Shape, metaclass=AutoSlots): class Ellipse(Shape):
""" """
An ellipse, which has a position, two radii, and a rotation. An ellipse, which has a position, two radii, and a rotation.
The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius. The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius.
""" """
__slots__ = ('_radii', '_rotation', __slots__ = (
'poly_num_points', 'poly_max_arclen') '_radii', '_rotation',
# Inherited
'_offset', '_layer', '_repetition', '_annotations',
)
_radii: NDArray[numpy.float64] _radii: NDArray[numpy.float64]
""" Ellipse radii """ """ Ellipse radii """
@ -27,12 +29,6 @@ class Ellipse(Shape, metaclass=AutoSlots):
_rotation: float _rotation: float
""" Angle from x-axis to first radius (ccw, radians) """ """ Angle from x-axis to first radius (ccw, radians) """
poly_num_points: Optional[int]
""" Sets the default number of points for `.polygonize()` """
poly_max_arclen: Optional[float]
""" Sets the default max segement length for `.polygonize()` """
# radius properties # radius properties
@property @property
def radii(self) -> Any: # TODO mypy#3004 NDArray[numpy.float64]: def radii(self) -> Any: # TODO mypy#3004 NDArray[numpy.float64]:
@ -92,30 +88,23 @@ class Ellipse(Shape, metaclass=AutoSlots):
self, self,
radii: ArrayLike, radii: ArrayLike,
*, *,
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
poly_max_arclen: Optional[float] = None,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
rotation: float = 0, rotation: float = 0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None, annotations: annotations_t | None = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False, raw: bool = False,
) -> None: ) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw: if raw:
assert(isinstance(radii, numpy.ndarray)) assert isinstance(radii, numpy.ndarray)
assert(isinstance(offset, numpy.ndarray)) assert isinstance(offset, numpy.ndarray)
self._radii = radii self._radii = radii
self._offset = offset self._offset = offset
self._rotation = rotation self._rotation = rotation
self._repetition = repetition self._repetition = repetition
self._annotations = annotations if annotations is not None else {} self._annotations = annotations if annotations is not None else {}
self._layer = layer self._layer = layer
self._dose = dose
else: else:
self.radii = radii self.radii = radii
self.offset = offset self.offset = offset
@ -123,33 +112,22 @@ class Ellipse(Shape, metaclass=AutoSlots):
self.repetition = repetition self.repetition = repetition
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.layer = layer self.layer = layer
self.dose = dose
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Ellipse': def __deepcopy__(self, memo: dict | None = None) -> 'Ellipse':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._radii = self._radii.copy() new._radii = self._radii.copy()
new._annotations = copy.deepcopy(self._annotations) new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons( def to_polygons(
self, self,
poly_num_points: Optional[int] = None, num_vertices: int | None = DEFAULT_POLY_NUM_VERTICES,
poly_max_arclen: Optional[float] = None, max_arclen: float | None = None,
) -> List[Polygon]: ) -> list[Polygon]:
if poly_num_points is None: if (num_vertices is None) and (max_arclen is None):
poly_num_points = self.poly_num_points
if poly_max_arclen is None:
poly_max_arclen = self.poly_max_arclen
if (poly_num_points is None) and (poly_max_arclen is None):
raise PatternError('Number of points and arclength left unspecified' raise PatternError('Number of points and arclength left unspecified'
' (default was also overridden)') ' (default was also overridden)')
@ -162,19 +140,19 @@ class Ellipse(Shape, metaclass=AutoSlots):
perimeter = pi * (r1 + r0) * (1 + 3 * h / (10 + math.sqrt(4 - 3 * h))) perimeter = pi * (r1 + r0) * (1 + 3 * h / (10 + math.sqrt(4 - 3 * h)))
n = [] n = []
if poly_num_points is not None: if num_vertices is not None:
n += [poly_num_points] n += [num_vertices]
if poly_max_arclen is not None: if max_arclen is not None:
n += [perimeter / poly_max_arclen] n += [perimeter / max_arclen]
num_points = int(round(max(n))) num_vertices = int(round(max(n)))
thetas = numpy.linspace(2 * pi, 0, num_points, endpoint=False) thetas = numpy.linspace(2 * pi, 0, num_vertices, endpoint=False)
sin_th, cos_th = (numpy.sin(thetas), numpy.cos(thetas)) sin_th, cos_th = (numpy.sin(thetas), numpy.cos(thetas))
xs = r0 * cos_th xs = r0 * cos_th
ys = r1 * sin_th ys = r1 * sin_th
xys = numpy.vstack((xs, ys)).T xys = numpy.vstack((xs, ys)).T
poly = Polygon(xys, dose=self.dose, layer=self.layer, offset=self.offset, rotation=self.rotation) poly = Polygon(xys, layer=self.layer, offset=self.offset, rotation=self.rotation)
return [poly] return [poly]
def get_bounds(self) -> NDArray[numpy.float64]: def get_bounds(self) -> NDArray[numpy.float64]:
@ -206,21 +184,9 @@ class Ellipse(Shape, metaclass=AutoSlots):
scale = self.radius_y scale = self.radius_y
angle = (self.rotation + pi / 2) % pi angle = (self.rotation + pi / 2) % pi
return ((type(self), radii, self.layer), return ((type(self), radii, self.layer),
(self.offset, scale / norm_value, angle, False, self.dose), (self.offset, scale / norm_value, angle, False),
lambda: Ellipse(radii=radii * norm_value, layer=self.layer)) lambda: Ellipse(radii=radii * norm_value, layer=self.layer))
def lock(self) -> 'Ellipse':
self.radii.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Ellipse':
Shape.unlock(self)
self.radii.flags.writeable = True
return self
def __repr__(self) -> str: def __repr__(self) -> str:
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else '' rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else '' return f'<Ellipse l{self.layer} o{self.offset} r{self.radii}{rotation}>'
locked = ' L' if self.locked else ''
return f'<Ellipse l{self.layer} o{self.offset} r{self.radii}{rotation}{dose}{locked}>'

View File

@ -1,4 +1,4 @@
from typing import List, Tuple, Dict, Optional, Sequence, Any from typing import Sequence, Any, cast
import copy import copy
from enum import Enum from enum import Enum
@ -7,11 +7,10 @@ from numpy import pi, inf
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from . import Shape, normalized_shape_tuple, Polygon, Circle from . import Shape, normalized_shape_tuple, Polygon, Circle
from .. import PatternError from ..error import PatternError
from ..repetition import Repetition from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, layer_t, AutoSlots from ..utils import is_scalar, rotation_matrix_2d, layer_t
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
from ..traits import LockableImpl
class PathCap(Enum): class PathCap(Enum):
@ -22,18 +21,22 @@ class PathCap(Enum):
# # defined by path.cap_extensions # # defined by path.cap_extensions
class Path(Shape, metaclass=AutoSlots): class Path(Shape):
""" """
A path, consisting of a bunch of vertices (Nx2 ndarray), a width, an end-cap shape, A path, consisting of a bunch of vertices (Nx2 ndarray), a width, an end-cap shape,
and an offset. and an offset.
A normalized_form(...) is available, but can be quite slow with lots of vertices. A normalized_form(...) is available, but can be quite slow with lots of vertices.
""" """
__slots__ = ('_vertices', '_width', '_cap', '_cap_extensions') __slots__ = (
'_vertices', '_width', '_cap', '_cap_extensions',
# Inherited
'_offset', '_layer', '_repetition', '_annotations',
)
_vertices: NDArray[numpy.float64] _vertices: NDArray[numpy.float64]
_width: float _width: float
_cap: PathCap _cap: PathCap
_cap_extensions: Optional[NDArray[numpy.float64]] _cap_extensions: NDArray[numpy.float64] | None
Cap = PathCap Cap = PathCap
@ -73,7 +76,7 @@ class Path(Shape, metaclass=AutoSlots):
# cap_extensions property # cap_extensions property
@property @property
def cap_extensions(self) -> Optional[Any]: #TODO mypy#3004 NDArray[numpy.float64]]: def cap_extensions(self) -> Any | None: # TODO mypy#3004 NDArray[numpy.float64]]:
""" """
Path end-cap extension Path end-cap extension
@ -83,7 +86,7 @@ class Path(Shape, metaclass=AutoSlots):
return self._cap_extensions return self._cap_extensions
@cap_extensions.setter @cap_extensions.setter
def cap_extensions(self, vals: Optional[ArrayLike]) -> None: def cap_extensions(self, vals: ArrayLike | None) -> None:
custom_caps = (PathCap.SquareCustom,) custom_caps = (PathCap.SquareCustom,)
if self.cap in custom_caps: if self.cap in custom_caps:
if vals is None: if vals is None:
@ -147,31 +150,26 @@ class Path(Shape, metaclass=AutoSlots):
width: float = 0.0, width: float = 0.0,
*, *,
cap: PathCap = PathCap.Flush, cap: PathCap = PathCap.Flush,
cap_extensions: Optional[ArrayLike] = None, cap_extensions: ArrayLike | None = None,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
rotation: float = 0, rotation: float = 0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None, annotations: annotations_t | None = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False, raw: bool = False,
) -> None: ) -> None:
LockableImpl.unlock(self)
self._cap_extensions = None # Since .cap setter might access it self._cap_extensions = None # Since .cap setter might access it
self.identifier = ()
if raw: if raw:
assert(isinstance(vertices, numpy.ndarray)) assert isinstance(vertices, numpy.ndarray)
assert(isinstance(offset, numpy.ndarray)) assert isinstance(offset, numpy.ndarray)
assert(isinstance(cap_extensions, numpy.ndarray) or cap_extensions is None) assert isinstance(cap_extensions, numpy.ndarray) or cap_extensions is None
self._vertices = vertices self._vertices = vertices
self._offset = offset self._offset = offset
self._repetition = repetition self._repetition = repetition
self._annotations = annotations if annotations is not None else {} self._annotations = annotations if annotations is not None else {}
self._layer = layer self._layer = layer
self._dose = dose
self._width = width self._width = width
self._cap = cap self._cap = cap
self._cap_extensions = cap_extensions self._cap_extensions = cap_extensions
@ -181,37 +179,32 @@ class Path(Shape, metaclass=AutoSlots):
self.repetition = repetition self.repetition = repetition
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.layer = layer self.layer = layer
self.dose = dose
self.width = width self.width = width
self.cap = cap self.cap = cap
self.cap_extensions = cap_extensions self.cap_extensions = cap_extensions
self.rotate(rotation) self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Path': def __deepcopy__(self, memo: dict | None = None) -> 'Path':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._vertices = self._vertices.copy() new._vertices = self._vertices.copy()
new._cap = copy.deepcopy(self._cap, memo) new._cap = copy.deepcopy(self._cap, memo)
new._cap_extensions = copy.deepcopy(self._cap_extensions, memo) new._cap_extensions = copy.deepcopy(self._cap_extensions, memo)
new._annotations = copy.deepcopy(self._annotations) new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
@staticmethod @staticmethod
def travel( def travel(
travel_pairs: Sequence[Tuple[float, float]], travel_pairs: Sequence[tuple[float, float]],
width: float = 0.0, width: float = 0.0,
cap: PathCap = PathCap.Flush, cap: PathCap = PathCap.Flush,
cap_extensions: Optional[Tuple[float, float]] = None, cap_extensions: tuple[float, float] | None = None,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
rotation: float = 0, rotation: float = 0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0,
) -> 'Path': ) -> 'Path':
""" """
Build a path by specifying the turn angles and travel distances Build a path by specifying the turn angles and travel distances
@ -232,7 +225,6 @@ class Path(Shape, metaclass=AutoSlots):
`mirrored=(True, False)` results in a reflection across the x-axis, `mirrored=(True, False)` results in a reflection across the x-axis,
multiplying the path's y-coordinates by -1. Default `(False, False)` multiplying the path's y-coordinates by -1. Default `(False, False)`
layer: Layer, default `0` layer: Layer, default `0`
dose: Dose, default `1.0`
Returns: Returns:
The resulting Path object The resulting Path object
@ -247,13 +239,13 @@ class Path(Shape, metaclass=AutoSlots):
return Path(vertices=verts, width=width, cap=cap, cap_extensions=cap_extensions, return Path(vertices=verts, width=width, cap=cap, cap_extensions=cap_extensions,
offset=offset, rotation=rotation, mirrored=mirrored, offset=offset, rotation=rotation, mirrored=mirrored,
layer=layer, dose=dose) layer=layer)
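A usage sketch for the travel-style construction above; the package path, the angle convention (counterclockwise turns relative to the previous heading), and all values are assumptions for illustration, not taken verbatim from the library:

from numpy import pi
from masque.shapes import Path

# Hypothetical route: turn by the given angle (radians), then advance the
# given distance; see the full travel() docstring for the exact convention.
route = Path.travel(
    travel_pairs=[(0, 10), (pi / 2, 5), (-pi / 2, 10)],
    width=0.4,
    cap=Path.Cap.Flush,
    layer=0,
)
print(route.vertices)   # Nx2 vertex array, relative to route.offset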
def to_polygons( def to_polygons(
self, self,
poly_num_points: int = None, num_vertices: int | None = None,
poly_max_arclen: float = None, max_arclen: float | None = None,
) -> List['Polygon']: ) -> list['Polygon']:
extensions = self._calculate_cap_extensions() extensions = self._calculate_cap_extensions()
v = remove_colinear_vertices(self.vertices, closed_path=False) v = remove_colinear_vertices(self.vertices, closed_path=False)
@ -262,7 +254,7 @@ class Path(Shape, metaclass=AutoSlots):
if self.width == 0: if self.width == 0:
verts = numpy.vstack((v, v[::-1])) verts = numpy.vstack((v, v[::-1]))
return [Polygon(offset=self.offset, vertices=verts, dose=self.dose, layer=self.layer)] return [Polygon(offset=self.offset, vertices=verts, layer=self.layer)]
perp = dvdir[:, ::-1] * [[1, -1]] * self.width / 2 perp = dvdir[:, ::-1] * [[1, -1]] * self.width / 2
@ -313,13 +305,13 @@ class Path(Shape, metaclass=AutoSlots):
o1.append(v[-1] - perp[-1]) o1.append(v[-1] - perp[-1])
verts = numpy.vstack((o0, o1[::-1])) verts = numpy.vstack((o0, o1[::-1]))
polys = [Polygon(offset=self.offset, vertices=verts, dose=self.dose, layer=self.layer)] polys = [Polygon(offset=self.offset, vertices=verts, layer=self.layer)]
if self.cap == PathCap.Circle: if self.cap == PathCap.Circle:
#for vert in v: # not sure if every vertex, or just ends? #for vert in v: # not sure if every vertex, or just ends?
for vert in [v[0], v[-1]]: for vert in [v[0], v[-1]]:
circ = Circle(offset=vert, radius=self.width / 2, dose=self.dose, layer=self.layer) circ = Circle(offset=vert, radius=self.width / 2, layer=self.layer)
polys += circ.to_polygons(poly_num_points=poly_num_points, poly_max_arclen=poly_max_arclen) polys += circ.to_polygons(num_vertices=num_vertices, max_arclen=max_arclen)
return polys return polys
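A short polygonization sketch for the code above, with constructor arguments assumed from the signatures shown in this diff:

from masque.shapes import Path

# Round end-caps are approximated by circles, so num_vertices controls how
# finely those caps are polygonized.
ribbon = Path(vertices=[(0, 0), (10, 0), (10, 10)], width=1.0, cap=Path.Cap.Circle)
polygons = ribbon.to_polygons(num_vertices=32)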
@ -373,15 +365,19 @@ class Path(Shape, metaclass=AutoSlots):
x_min = rotated_vertices[:, 0].argmin() x_min = rotated_vertices[:, 0].argmin()
if not is_scalar(x_min): if not is_scalar(x_min):
y_min = rotated_vertices[x_min, 1].argmin() y_min = rotated_vertices[x_min, 1].argmin()
x_min = x_min[y_min] x_min = cast(Sequence, x_min)[y_min]
reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0) reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
width0 = self.width / norm_value width0 = self.width / norm_value
return ((type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer), return ((type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer),
(offset, scale / norm_value, rotation, False, self.dose), (offset, scale / norm_value, rotation, False),
lambda: Path(reordered_vertices * norm_value, width=self.width * norm_value, lambda: Path(
cap=self.cap, layer=self.layer)) reordered_vertices * norm_value,
width=self.width * norm_value,
cap=self.cap,
layer=self.layer,
))
def clean_vertices(self) -> 'Path': def clean_vertices(self) -> 'Path':
""" """
@ -417,29 +413,13 @@ class Path(Shape, metaclass=AutoSlots):
if self.cap == PathCap.Square: if self.cap == PathCap.Square:
extensions = numpy.full(2, self.width / 2) extensions = numpy.full(2, self.width / 2)
elif self.cap == PathCap.SquareCustom: elif self.cap == PathCap.SquareCustom:
assert(isinstance(self.cap_extensions, numpy.ndarray)) assert isinstance(self.cap_extensions, numpy.ndarray)
extensions = self.cap_extensions extensions = self.cap_extensions
else: else:
# Flush or Circle # Flush or Circle
extensions = numpy.zeros(2) extensions = numpy.zeros(2)
return extensions return extensions
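To illustrate the SquareCustom branch above, a hedged construction sketch (extension values are arbitrary):

from masque.shapes import Path

# The square cap extends 1.0 past the first vertex and 0.25 past the last;
# Flush and Circle caps ignore cap_extensions entirely.
stub = Path(
    vertices=[(0, 0), (8, 0)],
    width=2.0,
    cap=Path.Cap.SquareCustom,
    cap_extensions=(1.0, 0.25),
)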
def lock(self) -> 'Path':
self.vertices.flags.writeable = False
if self.cap_extensions is not None:
self.cap_extensions.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Path':
Shape.unlock(self)
self.vertices.flags.writeable = True
if self.cap_extensions is not None:
self.cap_extensions.flags.writeable = True
return self
def __repr__(self) -> str: def __repr__(self) -> str:
centroid = self.offset + self.vertices.mean(axis=0) centroid = self.offset + self.vertices.mean(axis=0)
dose = f' d{self.dose:g}' if self.dose != 1 else '' return f'<Path l{self.layer} centroid {centroid} v{len(self.vertices)} w{self.width} c{self.cap}>'
locked = ' L' if self.locked else ''
return f'<Path l{self.layer} centroid {centroid} v{len(self.vertices)} w{self.width} c{self.cap}{dose}{locked}>'


@ -1,4 +1,4 @@
from typing import List, Dict, Optional, Sequence, Any from typing import Sequence, Any, cast
import copy import copy
import numpy import numpy
@ -6,21 +6,24 @@ from numpy import pi
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from . import Shape, normalized_shape_tuple from . import Shape, normalized_shape_tuple
from .. import PatternError from ..error import PatternError
from ..repetition import Repetition from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, layer_t, AutoSlots from ..utils import is_scalar, rotation_matrix_2d, layer_t
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
from ..traits import LockableImpl
class Polygon(Shape, metaclass=AutoSlots): class Polygon(Shape):
""" """
A polygon, consisting of a bunch of vertices (Nx2 ndarray) which specify an A polygon, consisting of a bunch of vertices (Nx2 ndarray) which specify an
implicitly-closed boundary, and an offset. implicitly-closed boundary, and an offset.
A `normalized_form(...)` is available, but can be quite slow with lots of vertices. A `normalized_form(...)` is available, but can be quite slow with lots of vertices.
""" """
__slots__ = ('_vertices',) __slots__ = (
'_vertices',
# Inherited
'_offset', '_layer', '_repetition', '_annotations',
)
_vertices: NDArray[numpy.float64] _vertices: NDArray[numpy.float64]
""" Nx2 ndarray of vertices `[[x0, y0], [x1, y1], ...]` """ """ Nx2 ndarray of vertices `[[x0, y0], [x1, y1], ...]` """
@ -80,42 +83,33 @@ class Polygon(Shape, metaclass=AutoSlots):
rotation: float = 0.0, rotation: float = 0.0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None, annotations: annotations_t | None = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False, raw: bool = False,
) -> None: ) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw: if raw:
assert(isinstance(vertices, numpy.ndarray)) assert isinstance(vertices, numpy.ndarray)
assert(isinstance(offset, numpy.ndarray)) assert isinstance(offset, numpy.ndarray)
self._vertices = vertices self._vertices = vertices
self._offset = offset self._offset = offset
self._repetition = repetition self._repetition = repetition
self._annotations = annotations if annotations is not None else {} self._annotations = annotations if annotations is not None else {}
self._layer = layer self._layer = layer
self._dose = dose
else: else:
self.vertices = vertices self.vertices = vertices
self.offset = offset self.offset = offset
self.repetition = repetition self.repetition = repetition
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.layer = layer self.layer = layer
self.dose = dose
self.rotate(rotation) self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon': def __deepcopy__(self, memo: dict | None = None) -> 'Polygon':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._vertices = self._vertices.copy() new._vertices = self._vertices.copy()
new._annotations = copy.deepcopy(self._annotations) new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
@staticmethod @staticmethod
@ -125,8 +119,7 @@ class Polygon(Shape, metaclass=AutoSlots):
rotation: float = 0.0, rotation: float = 0.0,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None,
) -> 'Polygon': ) -> 'Polygon':
""" """
Draw a square given side_length, centered on the origin. Draw a square given side_length, centered on the origin.
@ -136,7 +129,6 @@ class Polygon(Shape, metaclass=AutoSlots):
rotation: Rotation counterclockwise, in radians rotation: Rotation counterclockwise, in radians
offset: Offset, default `(0, 0)` offset: Offset, default `(0, 0)`
layer: Layer, default `0` layer: Layer, default `0`
dose: Dose, default `1.0`
repetition: `Repetition` object, default `None` repetition: `Repetition` object, default `None`
Returns: Returns:
@ -147,8 +139,7 @@ class Polygon(Shape, metaclass=AutoSlots):
[+1, +1], [+1, +1],
[+1, -1]], dtype=float) [+1, -1]], dtype=float)
vertices = 0.5 * side_length * norm_square vertices = 0.5 * side_length * norm_square
poly = Polygon(vertices, offset=offset, layer=layer, dose=dose, poly = Polygon(vertices, offset=offset, layer=layer, repetition=repetition)
repetition=repetition)
poly.rotate(rotation) poly.rotate(rotation)
return poly return poly
@ -160,8 +151,7 @@ class Polygon(Shape, metaclass=AutoSlots):
rotation: float = 0, rotation: float = 0,
offset: ArrayLike = (0.0, 0.0), offset: ArrayLike = (0.0, 0.0),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None,
) -> 'Polygon': ) -> 'Polygon':
""" """
Draw a rectangle with side lengths lx and ly, centered on the origin. Draw a rectangle with side lengths lx and ly, centered on the origin.
@ -172,7 +162,6 @@ class Polygon(Shape, metaclass=AutoSlots):
rotation: Rotation counterclockwise, in radians rotation: Rotation counterclockwise, in radians
offset: Offset, default `(0, 0)` offset: Offset, default `(0, 0)`
layer: Layer, default `0` layer: Layer, default `0`
dose: Dose, default `1.0`
repetition: `Repetition` object, default `None` repetition: `Repetition` object, default `None`
Returns: Returns:
@ -182,25 +171,23 @@ class Polygon(Shape, metaclass=AutoSlots):
[-lx, +ly], [-lx, +ly],
[+lx, +ly], [+lx, +ly],
[+lx, -ly]], dtype=float) [+lx, -ly]], dtype=float)
poly = Polygon(vertices, offset=offset, layer=layer, dose=dose, poly = Polygon(vertices, offset=offset, layer=layer, repetition=repetition)
repetition=repetition)
poly.rotate(rotation) poly.rotate(rotation)
return poly return poly
@staticmethod @staticmethod
def rect( def rect(
*, *,
xmin: Optional[float] = None, xmin: float | None = None,
xctr: Optional[float] = None, xctr: float | None = None,
xmax: Optional[float] = None, xmax: float | None = None,
lx: Optional[float] = None, lx: float | None = None,
ymin: Optional[float] = None, ymin: float | None = None,
yctr: Optional[float] = None, yctr: float | None = None,
ymax: Optional[float] = None, ymax: float | None = None,
ly: Optional[float] = None, ly: float | None = None,
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None,
) -> 'Polygon': ) -> 'Polygon':
""" """
Draw a rectangle by specifying side/center positions. Draw a rectangle by specifying side/center positions.
@ -218,7 +205,6 @@ class Polygon(Shape, metaclass=AutoSlots):
ymax: Maximum y coordinate ymax: Maximum y coordinate
ly: Length along y direction ly: Length along y direction
layer: Layer, default `0` layer: Layer, default `0`
dose: Dose, default `1.0`
repetition: `Repetition` object, default `None` repetition: `Repetition` object, default `None`
Returns: Returns:
@ -226,17 +212,17 @@ class Polygon(Shape, metaclass=AutoSlots):
""" """
if lx is None: if lx is None:
if xctr is None: if xctr is None:
assert(xmin is not None) assert xmin is not None
assert(xmax is not None) assert xmax is not None
xctr = 0.5 * (xmax + xmin) xctr = 0.5 * (xmax + xmin)
lx = xmax - xmin lx = xmax - xmin
elif xmax is None: elif xmax is None:
assert(xmin is not None) assert xmin is not None
assert(xctr is not None) assert xctr is not None
lx = 2 * (xctr - xmin) lx = 2 * (xctr - xmin)
elif xmin is None: elif xmin is None:
assert(xctr is not None) assert xctr is not None
assert(xmax is not None) assert xmax is not None
lx = 2 * (xmax - xctr) lx = 2 * (xmax - xctr)
else: else:
raise PatternError('Two of xmin, xctr, xmax, lx must be None!') raise PatternError('Two of xmin, xctr, xmax, lx must be None!')
@ -244,29 +230,29 @@ class Polygon(Shape, metaclass=AutoSlots):
if xctr is not None: if xctr is not None:
pass pass
elif xmax is None: elif xmax is None:
assert(xmin is not None) assert xmin is not None
assert(lx is not None) assert lx is not None
xctr = xmin + 0.5 * lx xctr = xmin + 0.5 * lx
elif xmin is None: elif xmin is None:
assert(xmax is not None) assert xmax is not None
assert(lx is not None) assert lx is not None
xctr = xmax - 0.5 * lx xctr = xmax - 0.5 * lx
else: else:
raise PatternError('Two of xmin, xctr, xmax, lx must be None!') raise PatternError('Two of xmin, xctr, xmax, lx must be None!')
if ly is None: if ly is None:
if yctr is None: if yctr is None:
assert(ymin is not None) assert ymin is not None
assert(ymax is not None) assert ymax is not None
yctr = 0.5 * (ymax + ymin) yctr = 0.5 * (ymax + ymin)
ly = ymax - ymin ly = ymax - ymin
elif ymax is None: elif ymax is None:
assert(ymin is not None) assert ymin is not None
assert(yctr is not None) assert yctr is not None
ly = 2 * (yctr - ymin) ly = 2 * (yctr - ymin)
elif ymin is None: elif ymin is None:
assert(yctr is not None) assert yctr is not None
assert(ymax is not None) assert ymax is not None
ly = 2 * (ymax - yctr) ly = 2 * (ymax - yctr)
else: else:
raise PatternError('Two of ymin, yctr, ymax, ly must be None!') raise PatternError('Two of ymin, yctr, ymax, ly must be None!')
@ -274,31 +260,29 @@ class Polygon(Shape, metaclass=AutoSlots):
if yctr is not None: if yctr is not None:
pass pass
elif ymax is None: elif ymax is None:
assert(ymin is not None) assert ymin is not None
assert(ly is not None) assert ly is not None
yctr = ymin + 0.5 * ly yctr = ymin + 0.5 * ly
elif ymin is None: elif ymin is None:
assert(ly is not None) assert ly is not None
assert(ymax is not None) assert ymax is not None
yctr = ymax - 0.5 * ly yctr = ymax - 0.5 * ly
else: else:
raise PatternError('Two of ymin, yctr, ymax, ly must be None!') raise PatternError('Two of ymin, yctr, ymax, ly must be None!')
poly = Polygon.rectangle(lx, ly, offset=(xctr, yctr), poly = Polygon.rectangle(lx, ly, offset=(xctr, yctr), layer=layer, repetition=repetition)
layer=layer, dose=dose, repetition=repetition)
return poly return poly
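To make the two-of-(min, center, max, length) convention above concrete, a call sketch with arbitrary values (both calls describe the same rectangle):

from masque.shapes import Polygon

# In each axis exactly two of the four quantities are supplied; the rest are inferred.
pad_a = Polygon.rect(xmin=0.0, lx=20.0, yctr=5.0, ly=10.0, layer=2)
pad_b = Polygon.rect(xctr=10.0, xmax=20.0, ymin=0.0, ymax=10.0, layer=2)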
@staticmethod @staticmethod
def octagon( def octagon(
*, *,
side_length: Optional[float] = None, side_length: float | None = None,
inner_radius: Optional[float] = None, inner_radius: float | None = None,
regular: bool = True, regular: bool = True,
center: ArrayLike = (0.0, 0.0), center: ArrayLike = (0.0, 0.0),
rotation: float = 0.0, rotation: float = 0.0,
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None,
) -> 'Polygon': ) -> 'Polygon':
""" """
Draw an octagon given one of (side length, inradius, circumradius). Draw an octagon given one of (side length, inradius, circumradius).
@ -317,7 +301,6 @@ class Polygon(Shape, metaclass=AutoSlots):
`0` results in four axis-aligned sides (the long sides of the `0` results in four axis-aligned sides (the long sides of the
irregular octagon). irregular octagon).
layer: Layer, default `0` layer: Layer, default `0`
dose: Dose, default `1.0`
repetition: `Repetition` object, default `None` repetition: `Repetition` object, default `None`
Returns: Returns:
@ -344,16 +327,15 @@ class Polygon(Shape, metaclass=AutoSlots):
side_length = 2 * inner_radius / s side_length = 2 * inner_radius / s
vertices = 0.5 * side_length * norm_oct vertices = 0.5 * side_length * norm_oct
poly = Polygon(vertices, offset=center, layer=layer, dose=dose, repetition=repetition) poly = Polygon(vertices, offset=center, layer=layer, repetition=repetition)
poly.rotate(rotation) poly.rotate(rotation)
return poly return poly
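A quick standalone check of the regular-octagon inradius relation used above (not library code):

import math

# For a regular octagon with side s, the inradius (apothem) is
# (s / 2) * cot(pi / 8) = (s / 2) * (1 + sqrt(2)).
side = 2.0
apothem = (side / 2) / math.tan(math.pi / 8)
assert math.isclose(apothem, (side / 2) * (1 + math.sqrt(2)))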
def to_polygons( def to_polygons(
self, self,
poly_num_points: int = None, # unused num_vertices: int | None = None, # unused
poly_max_arclen: float = None, # unused max_arclen: float | None = None, # unused
) -> List['Polygon']: ) -> list['Polygon']:
return [copy.deepcopy(self)] return [copy.deepcopy(self)]
def get_bounds(self) -> NDArray[numpy.float64]: def get_bounds(self) -> NDArray[numpy.float64]:
@ -391,13 +373,13 @@ class Polygon(Shape, metaclass=AutoSlots):
x_min = rotated_vertices[:, 0].argmin() x_min = rotated_vertices[:, 0].argmin()
if not is_scalar(x_min): if not is_scalar(x_min):
y_min = rotated_vertices[x_min, 1].argmin() y_min = rotated_vertices[x_min, 1].argmin()
x_min = x_min[y_min] x_min = cast(Sequence, x_min)[y_min]
reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0) reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
# TODO: normalize mirroring? # TODO: normalize mirroring?
return ((type(self), reordered_vertices.data.tobytes(), self.layer), return ((type(self), reordered_vertices.data.tobytes(), self.layer),
(offset, scale / norm_value, rotation, False, self.dose), (offset, scale / norm_value, rotation, False),
lambda: Polygon(reordered_vertices * norm_value, layer=self.layer)) lambda: Polygon(reordered_vertices * norm_value, layer=self.layer))
def clean_vertices(self) -> 'Polygon': def clean_vertices(self) -> 'Polygon':
@ -430,18 +412,6 @@ class Polygon(Shape, metaclass=AutoSlots):
self.vertices = remove_colinear_vertices(self.vertices, closed_path=True) self.vertices = remove_colinear_vertices(self.vertices, closed_path=True)
return self return self
def lock(self) -> 'Polygon':
self.vertices.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Polygon':
Shape.unlock(self)
self.vertices.flags.writeable = True
return self
def __repr__(self) -> str: def __repr__(self) -> str:
centroid = self.offset + self.vertices.mean(axis=0) centroid = self.offset + self.vertices.mean(axis=0)
dose = f' d{self.dose:g}' if self.dose != 1 else '' return f'<Polygon l{self.layer} centroid {centroid} v{len(self.vertices)}>'
locked = ' L' if self.locked else ''
return f'<Polygon l{self.layer} centroid {centroid} v{len(self.vertices)}{dose}{locked}>'


@ -1,57 +1,54 @@
from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING from typing import Callable, Self, TYPE_CHECKING
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import numpy import numpy
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from ..traits import (PositionableImpl, LayerableImpl, DoseableImpl, from ..traits import (
Rotatable, Mirrorable, Copyable, Scalable, Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, LockableImpl, RepeatableImpl, PositionableImpl, LayerableImpl,
AnnotatableImpl) PivotableImpl, RepeatableImpl, AnnotatableImpl,
)
if TYPE_CHECKING: if TYPE_CHECKING:
from . import Polygon from . import Polygon
# Type definitions # Type definitions
normalized_shape_tuple = Tuple[Tuple, normalized_shape_tuple = tuple[
Tuple[NDArray[numpy.float64], float, float, bool, float], tuple,
Callable[[], 'Shape']] tuple[NDArray[numpy.float64], float, float, bool],
Callable[[], 'Shape'],
]
# ## Module-wide defaults # ## Module-wide defaults
# Default number of points per polygon for shapes # Default number of points per polygon for shapes
DEFAULT_POLY_NUM_POINTS = 24 DEFAULT_POLY_NUM_VERTICES = 24
T = TypeVar('T', bound='Shape') class Shape(PositionableImpl, LayerableImpl, Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, RepeatableImpl, AnnotatableImpl, metaclass=ABCMeta):
class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, RepeatableImpl, LockableImpl, AnnotatableImpl, metaclass=ABCMeta):
""" """
Abstract class specifying functions common to all shapes. Class specifying functions common to all shapes.
""" """
__slots__ = () # Children should use AutoSlots __slots__ = () # Children should use AutoSlots
identifier: Tuple def __copy__(self) -> Self:
""" An arbitrary identifier for the shape, usually empty but used by `Pattern.flatten()` """
def __copy__(self) -> 'Shape':
cls = self.__class__ cls = self.__class__
new = cls.__new__(cls) new = cls.__new__(cls)
for name in self.__slots__: # type: str for name in self.__slots__: # type: str
object.__setattr__(new, name, getattr(self, name)) object.__setattr__(new, name, getattr(self, name))
return new return new
''' #
--- Abstract methods # Methods (abstract)
''' #
@abstractmethod @abstractmethod
def to_polygons( def to_polygons(
self, self,
num_vertices: Optional[int] = None, num_vertices: int | None = None,
max_arclen: Optional[float] = None, max_arclen: float | None = None,
) -> List['Polygon']: ) -> list['Polygon']:
""" """
Returns a list of polygons which approximate the shape. Returns a list of polygons which approximate the shape.
@ -68,9 +65,9 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
pass pass
@abstractmethod @abstractmethod
def normalized_form(self: T, norm_value: int) -> normalized_shape_tuple: def normalized_form(self, norm_value: int) -> normalized_shape_tuple:
""" """
Writes the shape in a standardized notation, with offset, scale, rotation, and dose Writes the shape in a standardized notation, with offset, scale, and rotation
information separated out from the remaining values. information separated out from the remaining values.
Args: Args:
@ -85,20 +82,20 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
`(intrinsic, extrinsic, constructor)`. These are further broken down as: `(intrinsic, extrinsic, constructor)`. These are further broken down as:
`intrinsic`: A tuple of basic types containing all information about the instance that `intrinsic`: A tuple of basic types containing all information about the instance that
is not contained in 'extrinsic'. Usually, `intrinsic[0] == type(self)`. is not contained in 'extrinsic'. Usually, `intrinsic[0] == type(self)`.
`extrinsic`: `([x_offset, y_offset], scale, rotation, mirror_across_x_axis, dose)` `extrinsic`: `([x_offset, y_offset], scale, rotation, mirror_across_x_axis)`
`constructor`: A callable (no arguments) which returns an instance of `type(self)` with `constructor`: A callable (no arguments) which returns an instance of `type(self)` with
internal state equivalent to `intrinsic`. internal state equivalent to `intrinsic`.
""" """
pass pass
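One way the (intrinsic, extrinsic, constructor) split documented above can be used is geometry deduplication; a sketch assuming only the documented behavior (the helper name is hypothetical):

from collections import defaultdict

def group_by_intrinsic(shapes, norm_value=10000):
    # Shapes that differ only by offset/scale/rotation/mirroring share an
    # intrinsic key; one constructor() call rebuilds the canonical geometry
    # for the whole group.
    groups = defaultdict(list)
    for shape in shapes:
        intrinsic, extrinsic, constructor = shape.normalized_form(norm_value)
        groups[intrinsic].append((extrinsic, constructor))
    return groups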
''' #
---- Non-abstract methods # Non-abstract methods
''' #
def manhattanize_fast( def manhattanize_fast(
self, self,
grid_x: ArrayLike, grid_x: ArrayLike,
grid_y: ArrayLike, grid_y: ArrayLike,
) -> List['Polygon']: ) -> list['Polygon']:
""" """
Returns a list of polygons with grid-aligned ("Manhattan") edges approximating the shape. Returns a list of polygons with grid-aligned ("Manhattan") edges approximating the shape.
@ -175,14 +172,14 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
return inds return inds
# Find the y indices on all x gridlines # Find the y indices on all x gridlines
xs = gx[gxi_min:gxi_max] xs = gx[int(gxi_min):int(gxi_max)]
inds = get_grid_inds(xs) inds = get_grid_inds(xs)
# Find y-intersections for x-midpoints # Find y-intersections for x-midpoints
xs2 = (xs[:-1] + xs[1:]) / 2 xs2 = (xs[:-1] + xs[1:]) / 2
inds2 = get_grid_inds(xs2) inds2 = get_grid_inds(xs2)
xinds = numpy.rint(numpy.arange(gxi_min, gxi_max - 0.99, 1 / 3), dtype=numpy.int64, casting='unsafe') xinds = numpy.rint(numpy.arange(gxi_min, gxi_max - 0.99, 1 / 3)).astype(numpy.int64)
# interleave the results # interleave the results
yinds = xinds.copy() yinds = xinds.copy()
@ -197,12 +194,10 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
vertex_lists.append(vlist) vertex_lists.append(vlist)
polygon_contours.append(numpy.vstack(vertex_lists)) polygon_contours.append(numpy.vstack(vertex_lists))
manhattan_polygons = [] manhattan_polygons = [
for contour in polygon_contours: Polygon(vertices=contour, layer=self.layer)
manhattan_polygons.append(Polygon( for contour in polygon_contours
vertices=contour, ]
layer=self.layer,
dose=self.dose))
return manhattan_polygons return manhattan_polygons
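A hedged usage sketch for the grid-snapping above (grid spacing and shape are arbitrary):

import numpy
from masque.shapes import Polygon

# Snap a rotated square to a uniform 0.5-unit grid that covers its extent.
grid = numpy.arange(-10.0, 10.0, 0.5)
tilted = Polygon.square(side_length=7.0, rotation=0.3)
snapped = tilted.manhattanize_fast(grid_x=grid, grid_y=grid)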
@ -210,7 +205,7 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
self, self,
grid_x: ArrayLike, grid_x: ArrayLike,
grid_y: ArrayLike, grid_y: ArrayLike,
) -> List['Polygon']: ) -> list['Polygon']:
""" """
Returns a list of polygons with grid-aligned ("Manhattan") edges approximating the shape. Returns a list of polygons with grid-aligned ("Manhattan") edges approximating the shape.
@ -293,23 +288,13 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
for contour in contours: for contour in contours:
# /2 deals with supersampling # /2 deals with supersampling
# +.5 deals with the fact that our 0-edge becomes -.5 in the super-sampled contour output # +.5 deals with the fact that our 0-edge becomes -.5 in the super-sampled contour output
snapped_contour = numpy.rint((contour + .5) / 2, dtype=numpy.int64, casting='unsafe') snapped_contour = numpy.rint((contour + .5) / 2).astype(numpy.int64)
vertices = numpy.hstack((grx[snapped_contour[:, None, 0] + offset_i[0]], vertices = numpy.hstack((grx[snapped_contour[:, None, 0] + offset_i[0]],
gry[snapped_contour[:, None, 1] + offset_i[1]])) gry[snapped_contour[:, None, 1] + offset_i[1]]))
manhattan_polygons.append(Polygon( manhattan_polygons.append(Polygon(
vertices=vertices, vertices=vertices,
layer=self.layer, layer=self.layer,
dose=self.dose)) ))
return manhattan_polygons return manhattan_polygons
def lock(self: T) -> T:
PositionableImpl._lock(self)
LockableImpl.lock(self)
return self
def unlock(self: T) -> T:
LockableImpl.unlock(self)
PositionableImpl._unlock(self)
return self


@ -1,4 +1,4 @@
from typing import List, Tuple, Dict, Sequence, Optional, Any from typing import Sequence, Any
import copy import copy
import numpy import numpy
@ -6,24 +6,27 @@ from numpy import pi, inf
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from . import Shape, Polygon, normalized_shape_tuple from . import Shape, Polygon, normalized_shape_tuple
from .. import PatternError from ..error import PatternError
from ..repetition import Repetition from ..repetition import Repetition
from ..traits import RotatableImpl from ..traits import RotatableImpl
from ..utils import is_scalar, get_bit, normalize_mirror, layer_t, AutoSlots from ..utils import is_scalar, get_bit, normalize_mirror, layer_t
from ..utils import annotations_t from ..utils import annotations_t
from ..traits import LockableImpl
# Loaded on use: # Loaded on use:
# from freetype import Face # from freetype import Face
# from matplotlib.path import Path # from matplotlib.path import Path
class Text(RotatableImpl, Shape, metaclass=AutoSlots): class Text(RotatableImpl, Shape):
""" """
Text (to be printed e.g. as a set of polygons). Text (to be printed e.g. as a set of polygons).
This is distinct from non-printed Label objects. This is distinct from non-printed Label objects.
""" """
__slots__ = ('_string', '_height', '_mirrored', 'font_path') __slots__ = (
'_string', '_height', '_mirrored', 'font_path',
# Inherited
'_offset', '_layer', '_repetition', '_annotations', '_rotation',
)
_string: str _string: str
_height: float _height: float
@ -71,20 +74,15 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
rotation: float = 0.0, rotation: float = 0.0,
mirrored: ArrayLike = (False, False), mirrored: ArrayLike = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, repetition: Repetition | None = None,
repetition: Optional[Repetition] = None, annotations: annotations_t | None = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False, raw: bool = False,
) -> None: ) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw: if raw:
assert(isinstance(offset, numpy.ndarray)) assert isinstance(offset, numpy.ndarray)
assert(isinstance(mirrored, numpy.ndarray)) assert isinstance(mirrored, numpy.ndarray)
self._offset = offset self._offset = offset
self._layer = layer self._layer = layer
self._dose = dose
self._string = string self._string = string
self._height = height self._height = height
self._rotation = rotation self._rotation = rotation
@ -94,7 +92,6 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
else: else:
self.offset = offset self.offset = offset
self.layer = layer self.layer = layer
self.dose = dose
self.string = string self.string = string
self.height = height self.height = height
self.rotation = rotation self.rotation = rotation
@ -102,23 +99,20 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
self.repetition = repetition self.repetition = repetition
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.font_path = font_path self.font_path = font_path
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Text': def __deepcopy__(self, memo: dict | None = None) -> 'Text':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._mirrored = copy.deepcopy(self._mirrored, memo) new._mirrored = copy.deepcopy(self._mirrored, memo)
new._annotations = copy.deepcopy(self._annotations) new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons( def to_polygons(
self, self,
poly_num_points: Optional[int] = None, # unused num_vertices: int | None = None, # unused
poly_max_arclen: Optional[float] = None, # unused max_arclen: float | None = None, # unused
) -> List[Polygon]: ) -> list[Polygon]:
all_polygons = [] all_polygons = []
total_advance = 0.0 total_advance = 0.0
for char in self.string: for char in self.string:
@ -126,7 +120,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
# Move these polygons to the right of the previous letter # Move these polygons to the right of the previous letter
for xys in raw_polys: for xys in raw_polys:
poly = Polygon(xys, dose=self.dose, layer=self.layer) poly = Polygon(xys, layer=self.layer)
poly.mirror2d(self.mirrored) poly.mirror2d(self.mirrored)
poly.scale_by(self.height) poly.scale_by(self.height)
poly.offset = self.offset + [total_advance, 0] poly.offset = self.offset + [total_advance, 0]
@ -151,13 +145,15 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
rotation += self.rotation rotation += self.rotation
rotation %= 2 * pi rotation %= 2 * pi
return ((type(self), self.string, self.font_path, self.layer), return ((type(self), self.string, self.font_path, self.layer),
(self.offset, self.height / norm_value, rotation, mirror_x, self.dose), (self.offset, self.height / norm_value, rotation, mirror_x),
lambda: Text(string=self.string, lambda: Text(
string=self.string,
height=self.height * norm_value, height=self.height * norm_value,
font_path=self.font_path, font_path=self.font_path,
rotation=rotation, rotation=rotation,
mirrored=(mirror_x, False), mirrored=(mirror_x, False),
layer=self.layer)) layer=self.layer,
))
def get_bounds(self) -> NDArray[numpy.float64]: def get_bounds(self) -> NDArray[numpy.float64]:
# rotation makes this a huge pain when using slot.advance and glyph.bbox(), so # rotation makes this a huge pain when using slot.advance and glyph.bbox(), so
@ -176,7 +172,7 @@ def get_char_as_polygons(
font_path: str, font_path: str,
char: str, char: str,
resolution: float = 48 * 64, resolution: float = 48 * 64,
) -> Tuple[List[List[List[float]]], float]: ) -> tuple[list[list[list[float]]], float]:
from freetype import Face # type: ignore from freetype import Face # type: ignore
from matplotlib.path import Path # type: ignore from matplotlib.path import Path # type: ignore
@ -213,7 +209,7 @@ def get_char_as_polygons(
tags = outline.tags[start:end + 1] tags = outline.tags[start:end + 1]
tags.append(tags[0]) tags.append(tags[0])
segments: List[List[List[float]]] = [] segments: list[list[list[float]]] = []
for j, point in enumerate(points): for j, point in enumerate(points):
# If we already have a segment, add this point to it # If we already have a segment, add this point to it
if j > 0: if j > 0:
@ -259,19 +255,7 @@ def get_char_as_polygons(
return polygons, advance return polygons, advance
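An assumed usage of the glyph helper above; the font path is a placeholder, and freetype-py plus matplotlib must be installed:

# Returns the glyph outline as nested point lists plus the horizontal advance
# to the next character.
polys, advance = get_char_as_polygons('/path/to/font.ttf', 'A')
print(len(polys), advance)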
def lock(self) -> 'Text':
self.mirrored.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Text':
Shape.unlock(self)
self.mirrored.flags.writeable = True
return self
def __repr__(self) -> str: def __repr__(self) -> str:
rotation = f'{self.rotation*180/pi:g}' if self.rotation != 0 else '' rotation = f'{self.rotation*180/pi:g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else '' mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
return f'<TextShape "{self.string}" l{self.layer} o{self.offset} h{self.height:g}{rotation}{mirrored}{dose}{locked}>' return f'<TextShape "{self.string}" l{self.layer} o{self.offset} h{self.height:g}{rotation}{mirrored}>'


@ -1,248 +0,0 @@
"""
SubPattern provides basic support for nesting Pattern objects within each other, by adding
offset, rotation, scaling, and other such properties to the reference.
"""
#TODO more top-level documentation
from typing import Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any, TypeVar
import copy
import numpy
from numpy import pi
from numpy.typing import NDArray, ArrayLike
from .error import PatternError
from .utils import is_scalar, AutoSlots, annotations_t
from .repetition import Repetition
from .traits import (PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl,
Mirrorable, PivotableImpl, Copyable, LockableImpl, RepeatableImpl,
AnnotatableImpl)
if TYPE_CHECKING:
from . import Pattern
S = TypeVar('S', bound='SubPattern')
class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mirrorable,
PivotableImpl, Copyable, RepeatableImpl, LockableImpl, AnnotatableImpl,
metaclass=AutoSlots):
"""
SubPattern provides basic support for nesting Pattern objects within each other, by adding
offset, rotation, scaling, and associated methods.
"""
__slots__ = ('_pattern',
'_mirrored',
'identifier',
)
_pattern: Optional['Pattern']
""" The `Pattern` being instanced """
_mirrored: NDArray[numpy.bool_]
""" Whether to mirror the instance across the x and/or y axes. """
identifier: Tuple[Any, ...]
""" Arbitrary identifier, used internally by some `masque` functions. """
def __init__(
self,
pattern: Optional['Pattern'],
*,
offset: ArrayLike = (0.0, 0.0),
rotation: float = 0.0,
mirrored: Optional[Sequence[bool]] = None,
dose: float = 1.0,
scale: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
identifier: Tuple[Any, ...] = (),
) -> None:
"""
Args:
pattern: Pattern to reference.
offset: (x, y) offset applied to the referenced pattern. Not affected by rotation etc.
rotation: Rotation (radians, counterclockwise) relative to the referenced pattern's (0, 0).
mirrored: Whether to mirror the referenced pattern across its x and y axes.
dose: Scaling factor applied to the dose.
scale: Scaling factor applied to the pattern's geometry.
repetition: TODO
locked: Whether the `SubPattern` is locked after initialization.
identifier: Arbitrary tuple, used internally by some `masque` functions.
"""
LockableImpl.unlock(self)
self.identifier = identifier
self.pattern = pattern
self.offset = offset
self.rotation = rotation
self.dose = dose
self.scale = scale
if mirrored is None:
mirrored = (False, False)
self.mirrored = mirrored
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.set_locked(locked)
def __copy__(self) -> 'SubPattern':
new = SubPattern(pattern=self.pattern,
offset=self.offset.copy(),
rotation=self.rotation,
dose=self.dose,
scale=self.scale,
mirrored=self.mirrored.copy(),
repetition=copy.deepcopy(self.repetition),
annotations=copy.deepcopy(self.annotations),
locked=self.locked)
return new
def __deepcopy__(self, memo: Dict = None) -> 'SubPattern':
memo = {} if memo is None else memo
new = copy.copy(self)
LockableImpl.unlock(new)
new.pattern = copy.deepcopy(self.pattern, memo)
new.repetition = copy.deepcopy(self.repetition, memo)
new.annotations = copy.deepcopy(self.annotations, memo)
new.set_locked(self.locked)
return new
# pattern property
@property
def pattern(self) -> Optional['Pattern']:
return self._pattern
@pattern.setter
def pattern(self, val: Optional['Pattern']) -> None:
from .pattern import Pattern
if val is not None and not isinstance(val, Pattern):
raise PatternError(f'Provided pattern {val} is not a Pattern object or None!')
self._pattern = val
# Mirrored property
@property
def mirrored(self) -> Any: #TODO mypy#3004 NDArray[numpy.bool_]:
return self._mirrored
@mirrored.setter
def mirrored(self, val: ArrayLike) -> None:
if is_scalar(val):
raise PatternError('Mirrored must be a 2-element list of booleans')
self._mirrored = numpy.array(val, dtype=bool, copy=True)
def as_pattern(self) -> 'Pattern':
"""
Returns:
A copy of self.pattern which has been scaled, rotated, etc. according to this
`SubPattern`'s properties.
"""
assert(self.pattern is not None)
pattern = self.pattern.deepcopy().deepunlock()
if self.scale != 1:
pattern.scale_by(self.scale)
if numpy.any(self.mirrored):
pattern.mirror2d(self.mirrored)
if self.rotation % (2 * pi) != 0:
pattern.rotate_around((0.0, 0.0), self.rotation)
if numpy.any(self.offset):
pattern.translate_elements(self.offset)
if self.dose != 1:
pattern.scale_element_doses(self.dose)
if self.repetition is not None:
combined = type(pattern)(name='__repetition__')
for dd in self.repetition.displacements:
temp_pat = pattern.deepcopy()
temp_pat.translate_elements(dd)
combined.append(temp_pat)
pattern = combined
return pattern
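The transform ordering applied above (scale, then mirror, then rotate about the origin, then translate) can be written out on a plain vertex array; the function below is an illustrative sketch, not part of masque, and the mirror-axis convention is assumed from the mirrored docstrings elsewhere in this diff:

import numpy

def instance_transform(vertices, scale=1.0, mirrored=(False, False),
                       rotation=0.0, offset=(0.0, 0.0)):
    # Same ordering as as_pattern(): scale -> mirror -> rotate about (0, 0)
    # -> translate.  Dose scaling and repetition expansion are omitted.
    v = numpy.asarray(vertices, dtype=float) * scale
    for axis, do in enumerate(mirrored):
        if do:
            v[:, 1 - axis] *= -1              # mirroring across axis 0 flips y
    c, s = numpy.cos(rotation), numpy.sin(rotation)
    v = v @ numpy.array([[c, s], [-s, c]])    # counterclockwise, row vectors
    return v + numpy.asarray(offset, dtype=float)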
def rotate(self: S, rotation: float) -> S:
self.rotation += rotation
if self.repetition is not None:
self.repetition.rotate(rotation)
return self
def mirror(self: S, axis: int) -> S:
self.mirrored[axis] = not self.mirrored[axis]
self.rotation *= -1
if self.repetition is not None:
self.repetition.mirror(axis)
return self
def get_bounds(self) -> Optional[NDArray[numpy.float64]]:
"""
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `SubPattern` in each dimension.
Returns `None` if the contained `Pattern` is empty.
Returns:
`[[x_min, y_min], [x_max, y_max]]` or `None`
"""
if self.pattern is None:
return None
return self.as_pattern().get_bounds()
def lock(self: S) -> S:
"""
Lock the SubPattern, disallowing changes
Returns:
self
"""
self.mirrored.flags.writeable = False
PositionableImpl._lock(self)
LockableImpl.lock(self)
return self
def unlock(self: S) -> S:
"""
Unlock the SubPattern
Returns:
self
"""
LockableImpl.unlock(self)
PositionableImpl._unlock(self)
self.mirrored.flags.writeable = True
return self
def deeplock(self: S) -> S:
"""
Recursively lock the SubPattern and its contained pattern
Returns:
self
"""
assert(self.pattern is not None)
self.lock()
self.pattern.deeplock()
return self
def deepunlock(self: S) -> S:
"""
Recursively unlock the SubPattern and its contained pattern
This is dangerous unless you have just performed a deepcopy, since
the subpattern and its components may be used in more than one place!
Returns:
self
"""
assert(self.pattern is not None)
self.unlock()
self.pattern.deepunlock()
return self
def __repr__(self) -> str:
name = self.pattern.name if self.pattern is not None else None
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
scale = f' d{self.scale:g}' if self.scale != 1 else ''
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<SubPattern "{name}" at {self.offset}{rotation}{scale}{mirrored}{dose}{locked}>'


@ -3,11 +3,9 @@ Traits (mixins) and default implementations
""" """
from .positionable import Positionable, PositionableImpl from .positionable import Positionable, PositionableImpl
from .layerable import Layerable, LayerableImpl from .layerable import Layerable, LayerableImpl
from .doseable import Doseable, DoseableImpl
from .rotatable import Rotatable, RotatableImpl, Pivotable, PivotableImpl from .rotatable import Rotatable, RotatableImpl, Pivotable, PivotableImpl
from .repeatable import Repeatable, RepeatableImpl from .repeatable import Repeatable, RepeatableImpl
from .scalable import Scalable, ScalableImpl from .scalable import Scalable, ScalableImpl
from .mirrorable import Mirrorable from .mirrorable import Mirrorable
from .copyable import Copyable from .copyable import Copyable
from .lockable import Lockable, LockableImpl
from .annotatable import Annotatable, AnnotatableImpl from .annotatable import Annotatable, AnnotatableImpl


@ -1,4 +1,3 @@
from typing import TypeVar
#from types import MappingProxyType #from types import MappingProxyType
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
@ -6,20 +5,19 @@ from ..utils import annotations_t
from ..error import MasqueError from ..error import MasqueError
T = TypeVar('T', bound='Annotatable') _empty_slots = () # Workaround to get mypy to ignore intentionally empty slots for superclass
I = TypeVar('I', bound='AnnotatableImpl')
class Annotatable(metaclass=ABCMeta): class Annotatable(metaclass=ABCMeta):
""" """
Abstract class for all annotatable entities Trait class for all annotatable entities
Annotations correspond to GDS/OASIS "properties" Annotations correspond to GDS/OASIS "properties"
""" """
__slots__ = () __slots__ = ()
''' #
---- Properties # Properties
''' #
@property @property
@abstractmethod @abstractmethod
def annotations(self) -> annotations_t: def annotations(self) -> annotations_t:
@ -33,23 +31,20 @@ class AnnotatableImpl(Annotatable, metaclass=ABCMeta):
""" """
Simple implementation of `Annotatable`. Simple implementation of `Annotatable`.
""" """
__slots__ = () __slots__ = _empty_slots
_annotations: annotations_t _annotations: annotations_t
""" Dictionary storing annotation name/value pairs """ """ Dictionary storing annotation name/value pairs """
''' #
---- Non-abstract properties # Non-abstract properties
''' #
@property @property
def annotations(self) -> annotations_t: def annotations(self) -> annotations_t:
return self._annotations return self._annotations
# # TODO: Find a way to make sure the subclass implements Lockable without dealing with diamond inheritance or this extra hasattr
# if hasattr(self, 'is_locked') and self.is_locked():
# return MappingProxyType(self._annotations)
@annotations.setter @annotations.setter
def annotations(self, annotations: annotations_t): def annotations(self, annotations: annotations_t) -> None:
if not isinstance(annotations, dict): if not isinstance(annotations, dict):
raise MasqueError(f'annotations expected dict, got {type(annotations)}') raise MasqueError(f'annotations expected dict, got {type(annotations)}')
self._annotations = annotations self._annotations = annotations


@ -1,21 +1,18 @@
from typing import TypeVar from typing import Self
from abc import ABCMeta from abc import ABCMeta
import copy import copy
T = TypeVar('T', bound='Copyable')
class Copyable(metaclass=ABCMeta): class Copyable(metaclass=ABCMeta):
""" """
Abstract class which adds .copy() and .deepcopy() Trait class which adds .copy() and .deepcopy()
""" """
__slots__ = () __slots__ = ()
''' #
---- Non-abstract methods # Non-abstract methods
''' #
def copy(self: T) -> T: def copy(self) -> Self:
""" """
Return a shallow copy of the object. Return a shallow copy of the object.
@ -24,7 +21,7 @@ class Copyable(metaclass=ABCMeta):
""" """
return copy.copy(self) return copy.copy(self)
def deepcopy(self: T) -> T: def deepcopy(self) -> Self:
""" """
Return a deep copy of the object. Return a deep copy of the object.


@ -1,76 +0,0 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
from ..error import MasqueError
T = TypeVar('T', bound='Doseable')
I = TypeVar('I', bound='DoseableImpl')
class Doseable(metaclass=ABCMeta):
"""
Abstract class for all doseable entities
"""
__slots__ = ()
'''
---- Properties
'''
@property
@abstractmethod
def dose(self) -> float:
"""
Dose (float >= 0)
"""
pass
# @dose.setter
# @abstractmethod
# def dose(self, val: float):
# pass
'''
---- Methods
'''
def set_dose(self: T, dose: float) -> T:
"""
Set the dose
Args:
dose: new value for dose
Returns:
self
"""
pass
class DoseableImpl(Doseable, metaclass=ABCMeta):
"""
Simple implementation of Doseable
"""
__slots__ = ()
_dose: float
""" Dose """
'''
---- Non-abstract properties
'''
@property
def dose(self) -> float:
return self._dose
@dose.setter
def dose(self, val: float):
if not val >= 0:
raise MasqueError('Dose must be non-negative')
self._dose = val
'''
---- Non-abstract methods
'''
def set_dose(self: I, dose: float) -> I:
self.dose = dose
return self


@ -1,21 +1,21 @@
from typing import TypeVar from typing import Self
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
from ..utils import layer_t from ..utils import layer_t
T = TypeVar('T', bound='Layerable') _empty_slots = () # Workaround to get mypy to ignore intentionally empty slots for superclass
I = TypeVar('I', bound='LayerableImpl')
class Layerable(metaclass=ABCMeta): class Layerable(metaclass=ABCMeta):
""" """
Abstract class for all layerable entities Trait class for all layerable entities
""" """
__slots__ = () __slots__ = ()
'''
---- Properties #
''' # Properties
#
@property @property
@abstractmethod @abstractmethod
def layer(self) -> layer_t: def layer(self) -> layer_t:
@ -29,10 +29,11 @@ class Layerable(metaclass=ABCMeta):
# def layer(self, val: layer_t): # def layer(self, val: layer_t):
# pass # pass
''' #
---- Methods # Methods
''' #
def set_layer(self: T, layer: layer_t) -> T: @abstractmethod
def set_layer(self, layer: layer_t) -> Self:
""" """
Set the layer Set the layer
@ -49,14 +50,14 @@ class LayerableImpl(Layerable, metaclass=ABCMeta):
""" """
Simple implementation of Layerable Simple implementation of Layerable
""" """
__slots__ = () __slots__ = _empty_slots
_layer: layer_t _layer: layer_t
""" Layer number, pair, or name """ """ Layer number, pair, or name """
''' #
---- Non-abstract properties # Non-abstract properties
''' #
@property @property
def layer(self) -> layer_t: def layer(self) -> layer_t:
return self._layer return self._layer
@ -65,9 +66,9 @@ class LayerableImpl(Layerable, metaclass=ABCMeta):
def layer(self, val: layer_t): def layer(self, val: layer_t):
self._layer = val self._layer = val
''' #
---- Non-abstract methods # Non-abstract methods
''' #
def set_layer(self: I, layer: layer_t) -> I: def set_layer(self, layer: layer_t) -> Self:
self.layer = layer self.layer = layer
return self return self


@ -1,103 +0,0 @@
from typing import TypeVar, Dict, Tuple, Any
from abc import ABCMeta, abstractmethod
from ..error import PatternLockedError
T = TypeVar('T', bound='Lockable')
I = TypeVar('I', bound='LockableImpl')
class Lockable(metaclass=ABCMeta):
"""
Abstract class for all lockable entities
"""
__slots__ = () # type: Tuple[str, ...]
'''
---- Methods
'''
@abstractmethod
def lock(self: T) -> T:
"""
Lock the object, disallowing further changes
Returns:
self
"""
pass
@abstractmethod
def unlock(self: T) -> T:
"""
Unlock the object, reallowing changes
Returns:
self
"""
pass
@abstractmethod
def is_locked(self) -> bool:
"""
Returns:
True if the object is locked
"""
pass
def set_locked(self: T, locked: bool) -> T:
"""
Locks or unlocks based on the argument.
No action if already in the requested state.
Args:
locked: State to set.
Returns:
self
"""
if locked != self.is_locked():
if locked:
self.lock()
else:
self.unlock()
return self
class LockableImpl(Lockable, metaclass=ABCMeta):
"""
Simple implementation of Lockable
"""
__slots__ = () # type: Tuple[str, ...]
locked: bool
""" If `True`, disallows changes to the object """
'''
---- Non-abstract methods
'''
def __setattr__(self, name, value):
if self.locked and name != 'locked':
raise PatternLockedError()
object.__setattr__(self, name, value)
def __getstate__(self) -> Dict[str, Any]:
if hasattr(self, '__slots__'):
return {key: getattr(self, key) for key in self.__slots__}
else:
return self.__dict__
def __setstate__(self, state: Dict[str, Any]) -> None:
for k, v in state.items():
object.__setattr__(self, k, v)
def lock(self: I) -> I:
object.__setattr__(self, 'locked', True)
return self
def unlock(self: I) -> I:
object.__setattr__(self, 'locked', False)
return self
def is_locked(self) -> bool:
return self.locked
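For reference, the attribute-freeze behavior removed above boils down to intercepting __setattr__; a minimal standalone sketch (class and exception names are illustrative, not masque's):

class Freezable:
    locked: bool = False

    def __setattr__(self, name, value):
        # Once locked, refuse ordinary attribute writes (the removed
        # LockableImpl raised PatternLockedError here instead).
        if self.locked and name != 'locked':
            raise AttributeError(f'{type(self).__name__} is locked')
        object.__setattr__(self, name, value)

    def lock(self):
        object.__setattr__(self, 'locked', True)
        return self

    def unlock(self):
        object.__setattr__(self, 'locked', False)
        return self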


@ -1,22 +1,15 @@
from typing import TypeVar, Tuple from typing import Self
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
T = TypeVar('T', bound='Mirrorable')
#I = TypeVar('I', bound='MirrorableImpl')
class Mirrorable(metaclass=ABCMeta): class Mirrorable(metaclass=ABCMeta):
""" """
Abstract class for all mirrorable entities Trait class for all mirrorable entities
""" """
__slots__ = () __slots__ = ()
'''
---- Abstract methods
'''
@abstractmethod @abstractmethod
def mirror(self: T, axis: int) -> T: def mirror(self, axis: int) -> Self:
""" """
Mirror the entity across an axis. Mirror the entity across an axis.
@ -28,7 +21,7 @@ class Mirrorable(metaclass=ABCMeta):
""" """
pass pass
def mirror2d(self: T, axes: Tuple[bool, bool]) -> T: def mirror2d(self, axes: tuple[bool, bool]) -> Self:
""" """
Optionally mirror the entity across both axes Optionally mirror the entity across both axes
@ -54,9 +47,9 @@ class Mirrorable(metaclass=ABCMeta):
# _mirrored: numpy.ndarray # ndarray[bool] # _mirrored: numpy.ndarray # ndarray[bool]
# """ Whether to mirror the instance across the x and/or y axes. """ # """ Whether to mirror the instance across the x and/or y axes. """
# #
# ''' # #
# ---- Properties # # Properties
# ''' # #
# # Mirrored property # # Mirrored property
# @property # @property
# def mirrored(self) -> numpy.ndarray: # ndarray[bool] # def mirrored(self) -> numpy.ndarray: # ndarray[bool]
@ -69,6 +62,6 @@ class Mirrorable(metaclass=ABCMeta):
# raise MasqueError('Mirrored must be a 2-element list of booleans') # raise MasqueError('Mirrored must be a 2-element list of booleans')
# self._mirrored = numpy.array(val, dtype=bool, copy=True) # self._mirrored = numpy.array(val, dtype=bool, copy=True)
# #
# ''' # #
# ---- Methods # # Methods
# ''' # #


@ -1,6 +1,6 @@
# TODO top-level comment about how traits should set __slots__ = (), and how to use AutoSlots # TODO top-level comment about how traits should set __slots__ = (), and how to use AutoSlots
from typing import TypeVar, Any, Optional from typing import Self, Any
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import numpy import numpy
@ -9,19 +9,18 @@ from numpy.typing import NDArray, ArrayLike
from ..error import MasqueError from ..error import MasqueError
T = TypeVar('T', bound='Positionable') _empty_slots = () # Workaround to get mypy to ignore intentionally empty slots for superclass
I = TypeVar('I', bound='PositionableImpl')
class Positionable(metaclass=ABCMeta): class Positionable(metaclass=ABCMeta):
""" """
Abstract class for all positionable entities Trait class for all positionable entities
""" """
__slots__ = () __slots__ = ()
''' #
---- Abstract properties # Properties
''' #
@property @property
@abstractmethod @abstractmethod
def offset(self) -> NDArray[numpy.float64]: def offset(self) -> NDArray[numpy.float64]:
@ -30,13 +29,13 @@ class Positionable(metaclass=ABCMeta):
""" """
pass pass
# @offset.setter @offset.setter
# @abstractmethod @abstractmethod
# def offset(self, val: ArrayLike): def offset(self, val: ArrayLike) -> None:
# pass pass
@abstractmethod @abstractmethod
def set_offset(self: T, offset: ArrayLike) -> T: def set_offset(self, offset: ArrayLike) -> Self:
""" """
Set the offset Set the offset
@ -49,7 +48,7 @@ class Positionable(metaclass=ABCMeta):
pass pass
@abstractmethod @abstractmethod
def translate(self: T, offset: ArrayLike) -> T: def translate(self, offset: ArrayLike) -> Self:
""" """
Translate the entity by the given offset Translate the entity by the given offset
@ -62,7 +61,7 @@ class Positionable(metaclass=ABCMeta):
pass pass
@abstractmethod @abstractmethod
def get_bounds(self) -> Optional[NDArray[numpy.float64]]: def get_bounds(self) -> NDArray[numpy.float64] | None:
""" """
Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the entity. Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the entity.
Returns `None` for an empty entity. Returns `None` for an empty entity.
@ -77,7 +76,7 @@ class Positionable(metaclass=ABCMeta):
This is handy for destructuring like `xy_min, xy_max = entity.get_bounds_nonempty()` This is handy for destructuring like `xy_min, xy_max = entity.get_bounds_nonempty()`
""" """
bounds = self.get_bounds() bounds = self.get_bounds()
assert(bounds is not None) assert bounds is not None
return bounds return bounds
@ -85,14 +84,14 @@ class PositionableImpl(Positionable, metaclass=ABCMeta):
""" """
Simple implementation of Positionable Simple implementation of Positionable
""" """
__slots__ = () __slots__ = _empty_slots
_offset: NDArray[numpy.float64] _offset: NDArray[numpy.float64]
""" `[x_offset, y_offset]` """ """ `[x_offset, y_offset]` """
''' #
---- Properties # Properties
''' #
# offset property # offset property
@property @property
def offset(self) -> Any: # TODO mypy#3003 NDArray[numpy.float64]: def offset(self) -> Any: # TODO mypy#3003 NDArray[numpy.float64]:
@ -108,35 +107,15 @@ class PositionableImpl(Positionable, metaclass=ABCMeta):
if val.size != 2: if val.size != 2:
raise MasqueError('Offset must be convertible to size-2 ndarray') raise MasqueError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten() self._offset = val.flatten() # type: ignore
''' #
---- Methods # Methods
''' #
def set_offset(self: I, offset: ArrayLike) -> I: def set_offset(self, offset: ArrayLike) -> Self:
self.offset = offset self.offset = offset
return self return self
def translate(self: I, offset: ArrayLike) -> I: def translate(self, offset: ArrayLike) -> Self:
self._offset += offset # type: ignore # NDArray += ArrayLike should be fine?? self._offset += offset # type: ignore # NDArray += ArrayLike should be fine??
return self return self
def _lock(self: I) -> I:
"""
Lock the entity, disallowing further changes
Returns:
self
"""
self._offset.flags.writeable = False
return self
def _unlock(self: I) -> I:
"""
Unlock the entity
Returns:
self
"""
self._offset.flags.writeable = True
return self
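
The pattern established in this file (traits keep `__slots__` empty, mutators return `Self`) is what lets concrete classes chain mutating calls without losing their own type; under the old scheme each trait needed its own bound TypeVar to say the same thing. A minimal standalone sketch of the idea, not taken from masque itself, assuming Python 3.11+ for `typing.Self`:

from typing import Self
import numpy
from numpy.typing import ArrayLike

class Marker:
    """Toy positionable object following the same Self-returning convention."""
    __slots__ = ('_offset',)

    def __init__(self, offset: ArrayLike = (0.0, 0.0)) -> None:
        self._offset = numpy.array(offset, dtype=float)

    def translate(self, offset: ArrayLike) -> Self:
        # Returning Self (rather than 'Marker') keeps the declared type correct in subclasses.
        self._offset += numpy.asarray(offset, dtype=float)
        return self

class NamedMarker(Marker):
    __slots__ = ('name',)

mm = NamedMarker().translate((1, 0)).translate((0, 2))   # statically typed as NamedMarker
print(mm._offset)   # [1. 2.]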

View File

@ -1,29 +1,28 @@
from typing import TypeVar, Optional, TYPE_CHECKING from typing import Self, TYPE_CHECKING
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
from ..error import MasqueError from ..error import MasqueError
_empty_slots = () # Workaround to get mypy to ignore intentionally empty slots for superclass
if TYPE_CHECKING: if TYPE_CHECKING:
from ..repetition import Repetition from ..repetition import Repetition
T = TypeVar('T', bound='Repeatable')
I = TypeVar('I', bound='RepeatableImpl')
class Repeatable(metaclass=ABCMeta): class Repeatable(metaclass=ABCMeta):
""" """
Abstract class for all repeatable entities Trait class for all repeatable entities
""" """
__slots__ = () __slots__ = ()
''' #
---- Properties # Properties
''' #
@property @property
@abstractmethod @abstractmethod
def repetition(self) -> Optional['Repetition']: def repetition(self) -> 'Repetition | None':
""" """
Repetition object, or None (single instance only) Repetition object, or None (single instance only)
""" """
@ -31,14 +30,14 @@ class Repeatable(metaclass=ABCMeta):
# @repetition.setter # @repetition.setter
# @abstractmethod # @abstractmethod
# def repetition(self, repetition: Optional['Repetition']): # def repetition(self, repetition: 'Repetition | None'):
# pass # pass
''' #
---- Methods # Methods
''' #
@abstractmethod @abstractmethod
def set_repetition(self: T, repetition: Optional['Repetition']) -> T: def set_repetition(self, repetition: 'Repetition | None') -> Self:
""" """
Set the repetition Set the repetition
@ -55,28 +54,28 @@ class RepeatableImpl(Repeatable, metaclass=ABCMeta):
""" """
Simple implementation of `Repeatable` Simple implementation of `Repeatable`
""" """
__slots__ = () __slots__ = _empty_slots
_repetition: Optional['Repetition'] _repetition: 'Repetition | None'
""" Repetition object, or None (single instance only) """ """ Repetition object, or None (single instance only) """
''' #
---- Non-abstract properties # Non-abstract properties
''' #
@property @property
def repetition(self) -> Optional['Repetition']: def repetition(self) -> 'Repetition | None':
return self._repetition return self._repetition
@repetition.setter @repetition.setter
def repetition(self, repetition: Optional['Repetition']): def repetition(self, repetition: 'Repetition | None'):
from ..repetition import Repetition from ..repetition import Repetition
if repetition is not None and not isinstance(repetition, Repetition): if repetition is not None and not isinstance(repetition, Repetition):
raise MasqueError(f'{repetition} is not a valid Repetition object!') raise MasqueError(f'{repetition} is not a valid Repetition object!')
self._repetition = repetition self._repetition = repetition
''' #
---- Non-abstract methods # Non-abstract methods
''' #
def set_repetition(self: I, repetition: Optional['Repetition']) -> I: def set_repetition(self, repetition: 'Repetition | None') -> Self:
self.repetition = repetition self.repetition = repetition
return self return self

View File

@ -1,31 +1,29 @@
from typing import TypeVar from typing import Self, cast, Any
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import numpy import numpy
from numpy import pi from numpy import pi
from numpy.typing import ArrayLike, NDArray from numpy.typing import ArrayLike
#from .positionable import Positionable from .positionable import Positionable
from ..error import MasqueError from ..error import MasqueError
from ..utils import is_scalar, rotation_matrix_2d from ..utils import rotation_matrix_2d
T = TypeVar('T', bound='Rotatable')
I = TypeVar('I', bound='RotatableImpl') _empty_slots = () # Workaround to get mypy to ignore intentionally empty slots for superclass
P = TypeVar('P', bound='Pivotable')
J = TypeVar('J', bound='PivotableImpl')
class Rotatable(metaclass=ABCMeta): class Rotatable(metaclass=ABCMeta):
""" """
Abstract class for all rotatable entities Trait class for all rotatable entities
""" """
__slots__ = () __slots__ = ()
''' #
---- Abstract methods # Methods
''' #
@abstractmethod @abstractmethod
def rotate(self: T, val: float) -> T: def rotate(self, val: float) -> Self:
""" """
Rotate the shape around its origin (0, 0), ignoring its offset. Rotate the shape around its origin (0, 0), ignoring its offset.
@ -42,14 +40,14 @@ class RotatableImpl(Rotatable, metaclass=ABCMeta):
""" """
Simple implementation of `Rotatable` Simple implementation of `Rotatable`
""" """
__slots__ = () __slots__ = _empty_slots
_rotation: float _rotation: float
""" rotation for the object, radians counterclockwise """ """ rotation for the object, radians counterclockwise """
''' #
---- Properties # Properties
''' #
@property @property
def rotation(self) -> float: def rotation(self) -> float:
""" Rotation, radians counterclockwise """ """ Rotation, radians counterclockwise """
@ -61,14 +59,14 @@ class RotatableImpl(Rotatable, metaclass=ABCMeta):
raise MasqueError('Rotation must be a scalar') raise MasqueError('Rotation must be a scalar')
self._rotation = val % (2 * pi) self._rotation = val % (2 * pi)
''' #
---- Methods # Methods
''' #
def rotate(self: I, rotation: float) -> I: def rotate(self, rotation: float) -> Self:
self.rotation += rotation self.rotation += rotation
return self return self
def set_rotation(self: I, rotation: float) -> I: def set_rotation(self, rotation: float) -> Self:
""" """
Set the rotation to a value Set the rotation to a value
@ -84,13 +82,13 @@ class RotatableImpl(Rotatable, metaclass=ABCMeta):
class Pivotable(metaclass=ABCMeta): class Pivotable(metaclass=ABCMeta):
""" """
Abstract class for entities which can be rotated around a point. Trait class for entities which can be rotated around a point.
This requires that they are `Positionable` but not necessarily `Rotatable` themselves. This requires that they are `Positionable` but not necessarily `Rotatable` themselves.
""" """
__slots__ = () __slots__ = ()
@abstractmethod @abstractmethod
def rotate_around(self: P, pivot: ArrayLike, rotation: float) -> P: def rotate_around(self, pivot: ArrayLike, rotation: float) -> Self:
""" """
Rotate the object around a point. Rotate the object around a point.
@ -110,11 +108,14 @@ class PivotableImpl(Pivotable, metaclass=ABCMeta):
""" """
__slots__ = () __slots__ = ()
def rotate_around(self: J, pivot: ArrayLike, rotation: float) -> J: offset: Any # TODO see if we can get around defining `offset` in PivotableImpl
""" `[x_offset, y_offset]` """
def rotate_around(self, pivot: ArrayLike, rotation: float) -> Self:
pivot = numpy.array(pivot, dtype=float) pivot = numpy.array(pivot, dtype=float)
self.translate(-pivot) cast(Positionable, self).translate(-pivot)
self.rotate(rotation) cast(Rotatable, self).rotate(rotation)
self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset) # type: ignore # TODO: mypy#3004 self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset) # type: ignore # TODO: mypy#3004
self.translate(+pivot) cast(Positionable, self).translate(+pivot)
return self return self
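
The body above is the usual translate-rotate-translate composition: shift so the pivot sits at the origin, rotate the offset (and the geometry via `rotate()`), then shift back. The `cast(Positionable, self)` / `cast(Rotatable, self)` calls are only for the type checker, since `PivotableImpl` does not itself inherit those traits but its concrete users are expected to. A standalone numeric check of the composition, assuming only numpy (the helper mirrors `rotation_matrix_2d` from masque.utils):

import numpy

def rotation_matrix_2d(theta: float) -> numpy.ndarray:
    # Counterclockwise rotation about the origin.
    return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                        [numpy.sin(theta), +numpy.cos(theta)]])

def rotate_point_around(point, pivot, angle):
    # Translate so the pivot is at the origin, rotate, translate back.
    return rotation_matrix_2d(angle) @ (numpy.asarray(point, dtype=float) - pivot) + pivot

print(rotate_point_around([2.0, 0.0], numpy.array([1.0, 0.0]), numpy.pi / 2))
# -> approximately [1. 1.]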

View File

@ -1,25 +1,24 @@
from typing import TypeVar from typing import Self
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
from ..error import MasqueError from ..error import MasqueError
from ..utils import is_scalar from ..utils import is_scalar
T = TypeVar('T', bound='Scalable') _empty_slots = () # Workaround to get mypy to ignore intentionally empty slots for superclass
I = TypeVar('I', bound='ScalableImpl')
class Scalable(metaclass=ABCMeta): class Scalable(metaclass=ABCMeta):
""" """
Abstract class for all scalable entities Trait class for all scalable entities
""" """
__slots__ = () __slots__ = ()
''' #
---- Abstract methods # Methods
''' #
@abstractmethod @abstractmethod
def scale_by(self: T, c: float) -> T: def scale_by(self, c: float) -> Self:
""" """
Scale the entity by a factor Scale the entity by a factor
@ -36,14 +35,14 @@ class ScalableImpl(Scalable, metaclass=ABCMeta):
""" """
Simple implementation of Scalable Simple implementation of Scalable
""" """
__slots__ = () __slots__ = _empty_slots
_scale: float _scale: float
""" scale factor for the entity """ """ scale factor for the entity """
''' #
---- Properties # Properties
''' #
@property @property
def scale(self) -> float: def scale(self) -> float:
return self._scale return self._scale
@ -56,14 +55,14 @@ class ScalableImpl(Scalable, metaclass=ABCMeta):
raise MasqueError('Scale must be positive') raise MasqueError('Scale must be positive')
self._scale = val self._scale = val
''' #
---- Methods # Methods
''' #
def scale_by(self: I, c: float) -> I: def scale_by(self, c: float) -> Self:
self.scale *= c self.scale *= c
return self return self
def set_scale(self: I, scale: float) -> I: def set_scale(self, scale: float) -> Self:
""" """
Set the scale to a value Set the scale to a value

View File

@ -1,165 +0,0 @@
"""
Various helper functions
"""
from typing import Any, Union, Tuple, Sequence, Dict, List
from abc import ABCMeta
import numpy
from numpy.typing import NDArray, ArrayLike
# Type definitions
layer_t = Union[int, Tuple[int, int], str]
annotations_t = Dict[str, List[Union[int, float, str]]]
def is_scalar(var: Any) -> bool:
"""
Alias for 'not hasattr(var, "__len__")'
Args:
var: Checks if `var` has a length.
"""
return not hasattr(var, "__len__")
def get_bit(bit_string: Any, bit_id: int) -> bool:
"""
Interprets bit number `bit_id` from the right (lsb) of `bit_string` as a boolean
Args:
bit_string: Bit string to test
bit_id: Bit number, 0-indexed from the right (lsb)
Returns:
Boolean value of the requested bit
"""
return bit_string & (1 << bit_id) != 0
def set_bit(bit_string: Any, bit_id: int, value: bool) -> Any:
"""
Returns `bit_string`, with bit number `bit_id` set to boolean `value`.
Args:
bit_string: Bit string to alter
bit_id: Bit number, 0-indexed from right (lsb)
value: Boolean value to set bit to
Returns:
Altered `bit_string`
"""
mask = (1 << bit_id)
bit_string &= ~mask
if value:
bit_string |= mask
return bit_string
def rotation_matrix_2d(theta: float) -> NDArray[numpy.float64]:
"""
2D rotation matrix for rotating counterclockwise around the origin.
Args:
theta: Angle to rotate, in radians
Returns:
rotation matrix
"""
return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
[numpy.sin(theta), +numpy.cos(theta)]])
def normalize_mirror(mirrored: Sequence[bool]) -> Tuple[bool, float]:
"""
Converts 0-2 mirror operations `(mirror_across_x_axis, mirror_across_y_axis)`
into 0-1 mirror operations and a rotation
Args:
mirrored: `(mirror_across_x_axis, mirror_across_y_axis)`
Returns:
`mirror_across_x_axis` (bool) and
`angle_to_rotate` in radians
"""
mirrored_x, mirrored_y = mirrored
mirror_x = (mirrored_x != mirrored_y) # XOR
angle = numpy.pi if mirrored_y else 0
return mirror_x, angle
def remove_duplicate_vertices(vertices: ArrayLike, closed_path: bool = True) -> NDArray[numpy.float64]:
"""
Given a list of vertices, remove any consecutive duplicates.
Args:
vertices: `[[x0, y0], [x1, y1], ...]`
closed_path: If True, `vertices` is interpreted as an implicitly-closed path
(i.e. the last vertex will be removed if it is the same as the first)
Returns:
`vertices` with no consecutive duplicates.
"""
vertices = numpy.array(vertices)
duplicates = (vertices == numpy.roll(vertices, 1, axis=0)).all(axis=1)
if not closed_path:
duplicates[0] = False
return vertices[~duplicates]
def remove_colinear_vertices(vertices: ArrayLike, closed_path: bool = True) -> NDArray[numpy.float64]:
"""
Given a list of vertices, remove any superfluous vertices (i.e.
those which lie along the line formed by their neighbors)
Args:
vertices: Nx2 ndarray of vertices
closed_path: If `True`, the vertices are assumed to represent an implicitly
closed path. If `False`, the path is assumed to be open. Default `True`.
Returns:
`vertices` with colinear (superfluous) vertices removed.
"""
vertices = remove_duplicate_vertices(vertices)
# Check for dx0/dy0 == dx1/dy1
dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...]
dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] # [[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dx0]]
dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40
slopes_equal = (dxdy_diff / err_mult) < 1e-15
if not closed_path:
slopes_equal[[0, -1]] = False
return vertices[~slopes_equal]
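
The slope test above is division-free: two segments with direction vectors (dx0, dy0) and (dx1, dy1) are parallel exactly when the cross product dx0*dy1 - dx1*dy0 vanishes, and `err_mult` only turns that absolute comparison into a relative one. A scalar sketch of the same check, assuming only numpy (the function name and tolerance handling here are illustrative):

import numpy

def is_colinear(p0, p1, p2, rel_tol: float = 1e-15) -> bool:
    # Cross product of the two edge vectors; (near) zero means the middle
    # vertex lies on the line through its neighbours.
    d0 = numpy.subtract(p1, p0)
    d1 = numpy.subtract(p2, p1)
    cross = d0[0] * d1[1] - d0[1] * d1[0]
    scale = abs(d0[0] * d1[1]) + abs(d0[1] * d1[0]) + 1e-40   # relative-error denominator
    return abs(cross) / scale < rel_tol

print(is_colinear((0, 0), (1, 1), (2, 2)))   # True  (all on y = x)
print(is_colinear((0, 0), (1, 1), (2, 0)))   # False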
class AutoSlots(ABCMeta):
"""
Metaclass for automatically generating __slots__ based on superclass type annotations.
Superclasses must set `__slots__ = ()` to make this work properly.
This is a workaround for the fact that non-empty `__slots__` can't be used
with multiple inheritance. Since we only use multiple inheritance with abstract
classes, they can have empty `__slots__` and their attribute type annotations
can be used to generate a full `__slots__` for the concrete class.
"""
def __new__(cls, name, bases, dctn):
parents = set()
for base in bases:
parents |= set(base.mro())
slots = tuple(dctn.get('__slots__', tuple()))
for parent in parents:
if not hasattr(parent, '__annotations__'):
continue
slots += tuple(getattr(parent, '__annotations__').keys())
dctn['__slots__'] = slots
return super().__new__(cls, name, bases, dctn)
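
`AutoSlots` exists because non-empty `__slots__` and multiple inheritance do not mix; the abstract traits therefore keep empty slots and only annotate the storage they expect, and the metaclass turns those annotations into real slots on the concrete class. A minimal self-contained sketch of the mechanism (class names here are illustrative, not masque's):

from abc import ABCMeta

class AutoSlots(ABCMeta):
    def __new__(cls, name, bases, dctn):
        # Collect annotated attribute names from every ancestor into __slots__.
        slots = tuple(dctn.get('__slots__', ()))
        for base in bases:
            for parent in base.mro():
                slots += tuple(vars(parent).get('__annotations__', {}).keys())
        dctn['__slots__'] = slots
        return super().__new__(cls, name, bases, dctn)

class HasScale(metaclass=ABCMeta):
    __slots__ = ()      # abstract mixin keeps slots empty...
    _scale: float       # ...but annotates the storage it expects

class Circle(HasScale, metaclass=AutoSlots):
    __slots__ = ('_radius',)

print(Circle.__slots__)   # ('_radius', '_scale')
cc = Circle()
cc._radius, cc._scale = 1.0, 2.0   # both slots exist; no instance __dict__ is created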

View File

@ -1,15 +1,18 @@
""" """
Various helper functions, type definitions, etc. Various helper functions, type definitions, etc.
""" """
from .types import layer_t, annotations_t from .types import layer_t, annotations_t, SupportsBool
from .array import is_scalar from .array import is_scalar
from .autoslots import AutoSlots from .autoslots import AutoSlots
from .deferreddict import DeferredDict
from .decorators import oneshot
from .bitwise import get_bit, set_bit from .bitwise import get_bit, set_bit
from .vertices import ( from .vertices import (
remove_duplicate_vertices, remove_colinear_vertices, poly_contains_points remove_duplicate_vertices, remove_colinear_vertices, poly_contains_points
) )
from .transform import rotation_matrix_2d, normalize_mirror from .transform import rotation_matrix_2d, normalize_mirror, rotate_offsets_around
from . import ports2data
#from . import pack2d #from . import pack2d

View File

@ -0,0 +1,21 @@
from typing import Callable
from functools import wraps
from ..error import OneShotError
def oneshot(func: Callable) -> Callable:
"""
Raises a OneShotError if the decorated function is called more than once
"""
expired = False
@wraps(func)
def wrapper(*args, **kwargs):
nonlocal expired
if expired:
raise OneShotError(func.__name__)
expired = True
return func(*args, **kwargs)
return wrapper
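
A short usage sketch of the `@oneshot` decorator added above; `OneShotError` lives in masque.error, so a stand-in exception is defined here to keep the snippet self-contained:

from functools import wraps
from typing import Callable

class OneShotError(Exception):
    """Stand-in for masque.error.OneShotError."""

def oneshot(func: Callable) -> Callable:
    # Same idea as above: a closed-over flag marks the function as used up.
    expired = False

    @wraps(func)
    def wrapper(*args, **kwargs):
        nonlocal expired
        if expired:
            raise OneShotError(func.__name__)
        expired = True
        return func(*args, **kwargs)
    return wrapper

@oneshot
def build_top() -> str:
    return 'top_cell'

print(build_top())       # 'top_cell'
try:
    build_top()          # second call raises
except OneShotError as err:
    print('already called:', err)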

View File

@ -1,37 +1,14 @@
""" """
2D bin-packing 2D bin-packing
""" """
from typing import Tuple, List, Set, Sequence, Callable from typing import Sequence, Callable, Mapping
import numpy import numpy
from numpy.typing import NDArray, ArrayLike from numpy.typing import NDArray, ArrayLike
from ..error import MasqueError from ..error import MasqueError
from ..pattern import Pattern from ..pattern import Pattern
from ..subpattern import SubPattern from ..ref import Ref
def pack_patterns(patterns: Sequence[Pattern],
regions: numpy.ndarray,
spacing: Tuple[float, float],
presort: bool = True,
allow_rejects: bool = True,
packer: Callable = maxrects_bssf,
) -> Tuple[Pattern, List[Pattern]]:
half_spacing = numpy.array(spacing) / 2
bounds = [pp.get_bounds() for pp in patterns]
sizes = [bb[1] - bb[0] + spacing if bb is not None else spacing for bb in bounds]
offsets = [half_spacing - bb[0] if bb is not None else (0, 0) for bb in bounds]
locations, reject_inds = packer(sizes, regions, presort=presort, allow_rejects=allow_rejects)
pat = Pattern()
pat.subpatterns = [SubPattern(pp, offset=oo + loc)
for pp, oo, loc in zip(patterns, offsets, locations)]
rejects = [patterns[ii] for ii in reject_inds]
return pat, rejects
def maxrects_bssf( def maxrects_bssf(
@ -39,7 +16,7 @@ def maxrects_bssf(
containers: ArrayLike, containers: ArrayLike,
presort: bool = True, presort: bool = True,
allow_rejects: bool = True, allow_rejects: bool = True,
) -> Tuple[NDArray[numpy.float64], Set[int]]: ) -> tuple[NDArray[numpy.float64], set[int]]:
""" """
sizes should be Nx2 sizes should be Nx2
regions should be Mx4 (xmin, ymin, xmax, ymax) regions should be Mx4 (xmin, ymin, xmax, ymax)
@ -111,7 +88,7 @@ def guillotine_bssf_sas(rect_sizes: numpy.ndarray,
regions: numpy.ndarray, regions: numpy.ndarray,
presort: bool = True, presort: bool = True,
allow_rejects: bool = True, allow_rejects: bool = True,
) -> Tuple[numpy.ndarray, Set[int]]: ) -> tuple[numpy.ndarray, set[int]]:
""" """
sizes should be Nx2 sizes should be Nx2
regions should be Mx4 (xmin, ymin, xmax, ymax) regions should be Mx4 (xmin, ymin, xmax, ymax)
@ -163,3 +140,28 @@ def guillotine_bssf_sas(rect_sizes: numpy.ndarray,
new_region0, new_region1)) new_region0, new_region1))
return rect_locs, rejected_inds return rect_locs, rejected_inds
def pack_patterns(
library: Mapping[str, Pattern],
patterns: Sequence[str],
regions: numpy.ndarray,
spacing: tuple[float, float],
presort: bool = True,
allow_rejects: bool = True,
packer: Callable = maxrects_bssf,
) -> tuple[Pattern, list[str]]:
half_spacing = numpy.array(spacing) / 2
bounds = [library[pp].get_bounds() for pp in patterns]
sizes = [bb[1] - bb[0] + spacing if bb is not None else spacing for bb in bounds]
offsets = [half_spacing - bb[0] if bb is not None else (0, 0) for bb in bounds]
locations, reject_inds = packer(sizes, regions, presort=presort, allow_rejects=allow_rejects)
pat = Pattern()
pat.refs = [Ref(pp, offset=oo + loc)
for pp, oo, loc in zip(patterns, offsets, locations)]
rejects = [patterns[ii] for ii in reject_inds]
return pat, rejects
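
The reworked `pack_patterns` takes cell names plus the library that maps names to Patterns, rather than Pattern objects directly, and returns rejected names rather than rejected Patterns. The bounding-box bookkeeping at its top is worth unpacking: each cell's packed footprint is its bounding box grown by `spacing`, and `offsets` later re-centres the cell inside that footprint. A runnable sketch of just that step, with plain numpy standing in for the library lookup (the example bounds are made up):

import numpy

spacing = numpy.array([10.0, 10.0])
half_spacing = spacing / 2

# Bounding boxes in get_bounds() form, [[xmin, ymin], [xmax, ymax]], or None for an empty cell.
bounds = [numpy.array([[-5.0, -5.0], [5.0, 5.0]]), None]

# Same bookkeeping as pack_patterns: empty cells still occupy `spacing`, and each offset
# shifts the cell so that its bbox plus half the keep-out starts at the packed corner.
sizes = [bb[1] - bb[0] + spacing if bb is not None else spacing for bb in bounds]
offsets = [half_spacing - bb[0] if bb is not None else (0, 0) for bb in bounds]

print(sizes)     # [array([20., 20.]), array([10., 10.])]
print(offsets)   # [array([10., 10.]), (0, 0)]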

masque/utils/ports2data.py (new file, +177 lines)
View File

@ -0,0 +1,177 @@
"""
Functions for writing port data into Pattern geometry/annotations/labels (`ports_to_data`)
and retrieving it (`data_to_ports`).
These use the format 'name:ptype angle_deg' written into labels, which are placed at
the port locations. This particular approach is just a sensible default; feel free
to write equivalent functions for your own format or alternate storage methods.
"""
from typing import Sequence, Mapping
import logging
import numpy
from ..pattern import Pattern
from ..label import Label
from ..utils import layer_t
from ..ports import Port
from ..error import PatternError
from ..library import ILibraryView, LibraryView
logger = logging.getLogger(__name__)
def ports_to_data(pattern: Pattern, layer: layer_t) -> Pattern:
"""
Place a text label at each port location, specifying the port data in the format
'name:ptype angle_deg'
This can be used to debug port locations or to automatically generate ports
when reading in a GDS file.
NOTE that `pattern` is modified by this function
Args:
pattern: The pattern which is to have its ports labeled. MODIFIED in-place.
layer: The layer on which the labels will be placed.
Returns:
`pattern`
"""
for name, port in pattern.ports.items():
if port.rotation is None:
angle_deg = numpy.inf
else:
angle_deg = numpy.rad2deg(port.rotation)
pattern.labels += [
Label(string=f'{name}:{port.ptype} {angle_deg:g}', layer=layer, offset=port.offset)
]
return pattern
def data_to_ports(
layers: Sequence[layer_t],
library: Mapping[str, Pattern],
pattern: Pattern, # Pattern is good since we don't want to do library[name] to avoid infinite recursion.
# LazyLibrary protects against library[ref.target] causing a circular lookup.
# For others, maybe check for cycles up front? TODO
name: str | None = None, # Note: name optional, but arg order different from read(postprocess=)
max_depth: int = 0,
skip_subcells: bool = True,
# TODO missing ok?
) -> Pattern:
"""
# TODO fixup documentation in port_utils
# TODO move port_utils to utils.file?
Examine `pattern` for labels specifying port info, and use that info
to fill out its `ports` attribute.
Labels are assumed to be placed at the port locations, and have the format
'name:ptype angle_deg'
Args:
layers: Search for labels on all the given layers.
pattern: Pattern object to scan for labels.
max_depth: Maximum hierarchy depth to search. Default 0
(do not search subcells); increase this to search deeper into the hierarchy.
skip_subcells: If port labels are found at a given hierarchy level,
do not continue searching at deeper levels. This allows subcells
to contain their own port info without interfering with supercells'
port data.
Default True.
Returns:
The updated `pattern`. Port labels are not removed.
"""
if pattern.ports:
logger.warning(f'Pattern {name if name else pattern} already had ports, skipping data_to_ports')
return pattern
if not isinstance(library, ILibraryView):
library = LibraryView(library)
data_to_ports_flat(layers, pattern, name)
if (skip_subcells and pattern.ports) or max_depth == 0:
return pattern
# Load ports for all subpatterns, and use any we find
found_ports = False
for target in set(rr.target for rr in pattern.refs):
if target is None:
continue
pp = data_to_ports(
layers=layers,
library=library,
pattern=library[target],
name=target,
max_depth=max_depth - 1,
skip_subcells=skip_subcells,
)
found_ports |= bool(pp.ports)
if not found_ports:
return pattern
for ref in pattern.refs:
if ref.target is None:
continue
aa = library.abstract(ref.target)
if not aa.ports:
continue
aa.apply_ref_transform(ref)
pattern.check_ports(other_names=aa.ports.keys())
pattern.ports.update(aa.ports)
return pattern
def data_to_ports_flat(
layers: Sequence[layer_t],
pattern: Pattern,
cell_name: str | None = None,
) -> Pattern:
"""
Examine `pattern` for labels specifying port info, and use that info
to fill out its `ports` attribute.
Labels are assumed to be placed at the port locations, and have the format
'name:ptype angle_deg'
The pattern is assumed to be flat (have no `refs`) and have no pre-existing ports.
Args:
layers: Search for labels on all the given layers.
pattern: Pattern object to scan for labels.
cell_name: optional, used for warning message only
Returns:
The updated `pattern`. Port labels are not removed.
"""
labels = [ll for ll in pattern.labels if ll.layer in layers]
if not labels:
return pattern
pstr = cell_name if cell_name is not None else repr(pattern)
if pattern.ports:
raise PatternError(f'Pattern "{pstr}" has pre-existing ports!')
local_ports = {}
for label in labels:
name, property_string = label.string.split(':')
properties = property_string.split(' ')
ptype = properties[0]
angle_deg = float(properties[1]) if len(properties) > 1 else 0
xy = label.offset
angle = numpy.deg2rad(angle_deg)
if name in local_ports:
logger.warning(f'Duplicate port "{name}" in pattern "{pstr}"')
local_ports[name] = Port(offset=xy, rotation=angle, ptype=ptype)
pattern.ports.update(local_ports)
return pattern
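
The round trip between `ports_to_data` and `data_to_ports_flat` is just the label string 'name:ptype angle_deg' placed at the port location, with a rotation of None stored as 'inf'. A standalone sketch of that encode/decode, with plain tuples standing in for masque's Label and Port objects:

import numpy

def encode_port(name: str, ptype: str, rotation_rad: float | None) -> str:
    # Same convention as ports_to_data: a rotation of None is written as 'inf'.
    angle_deg = numpy.inf if rotation_rad is None else numpy.rad2deg(rotation_rad)
    return f'{name}:{ptype} {angle_deg:g}'

def decode_port(label_string: str) -> tuple[str, str, float]:
    # Mirrors the parsing in data_to_ports_flat above.
    name, property_string = label_string.split(':')
    ptype, angle_deg = property_string.split(' ')
    return name, ptype, numpy.deg2rad(float(angle_deg))

ss = encode_port('in0', 'optical', numpy.pi / 2)
print(ss)               # in0:optical 90
print(decode_port(ss))  # ('in0', 'optical', 1.5707963267948966)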

View File

@ -1,7 +1,7 @@
""" """
Geometric transforms Geometric transforms
""" """
from typing import Sequence, Tuple from typing import Sequence
import numpy import numpy
from numpy.typing import NDArray from numpy.typing import NDArray
@ -21,7 +21,7 @@ def rotation_matrix_2d(theta: float) -> NDArray[numpy.float64]:
[numpy.sin(theta), +numpy.cos(theta)]]) [numpy.sin(theta), +numpy.cos(theta)]])
def normalize_mirror(mirrored: Sequence[bool]) -> Tuple[bool, float]: def normalize_mirror(mirrored: Sequence[bool]) -> tuple[bool, float]:
""" """
Converts 0-2 mirror operations `(mirror_across_x_axis, mirror_across_y_axis)` Converts 0-2 mirror operations `(mirror_across_x_axis, mirror_across_y_axis)`
into 0-1 mirror operations and a rotation into 0-1 mirror operations and a rotation
@ -38,3 +38,17 @@ def normalize_mirror(mirrored: Sequence[bool]) -> Tuple[bool, float]:
mirror_x = (mirrored_x != mirrored_y) # XOR mirror_x = (mirrored_x != mirrored_y) # XOR
angle = numpy.pi if mirrored_y else 0 angle = numpy.pi if mirrored_y else 0
return mirror_x, angle return mirror_x, angle
def rotate_offsets_around(
offsets: NDArray[numpy.float64],
pivot: NDArray[numpy.float64],
angle: float,
) -> NDArray[numpy.float64]:
"""
Rotates offsets around a pivot point.
"""
offsets -= pivot
offsets[:] = (rotation_matrix_2d(angle) @ offsets.T).T
offsets += pivot
return offsets
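
The identity behind `normalize_mirror` above is that mirroring across the y-axis equals mirroring across the x-axis followed by a half-turn, which is why the flag pair (False, True) collapses to (mirror_x=True, angle=pi). A quick numeric check, assuming only numpy (a rotation by pi simply negates both coordinates):

import numpy

point = numpy.array([3.0, 2.0])

mirror_across_y = point * (-1, 1)                      # flip x
mirror_across_x_then_half_turn = -(point * (1, -1))    # flip y, then rotate by pi

print(numpy.allclose(mirror_across_y, mirror_across_x_then_half_turn))   # True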

View File

@ -1,8 +1,13 @@
""" """
Type definitions Type definitions
""" """
from typing import Union, Tuple, Sequence, Dict, List from typing import Protocol
layer_t = Union[int, Tuple[int, int], str] layer_t = int | tuple[int, int] | str
annotations_t = Dict[str, List[Union[int, float, str]]] annotations_t = dict[str, list[int | float | str]]
class SupportsBool(Protocol):
def __bool__(self) -> bool:
...

View File

@ -77,13 +77,13 @@ def poly_contains_points(
vertices = numpy.array(vertices, copy=False) vertices = numpy.array(vertices, copy=False)
if points.size == 0: if points.size == 0:
return numpy.zeros(0) return numpy.zeros(0, dtype=numpy.int8)
min_bounds = numpy.min(vertices, axis=0)[None, :] min_bounds = numpy.min(vertices, axis=0)[None, :]
max_bounds = numpy.max(vertices, axis=0)[None, :] max_bounds = numpy.max(vertices, axis=0)[None, :]
trivially_outside = ((points < min_bounds).any(axis=1) trivially_outside = ((points < min_bounds).any(axis=1)
| (points > max_bounds).any(axis=1)) | (points > max_bounds).any(axis=1)) # noqa: E128
nontrivial = ~trivially_outside nontrivial = ~trivially_outside
if trivially_outside.all(): if trivially_outside.all():
@ -101,10 +101,10 @@ def poly_contains_points(
dv = numpy.roll(verts, -1, axis=0) - verts dv = numpy.roll(verts, -1, axis=0) - verts
is_left = (dv[:, 0] * (ntpts[..., 1] - verts[:, 1]) # >0 if left of dv, <0 if right, 0 if on the line is_left = (dv[:, 0] * (ntpts[..., 1] - verts[:, 1]) # >0 if left of dv, <0 if right, 0 if on the line
- dv[:, 1] * (ntpts[..., 0] - verts[:, 0])) - dv[:, 1] * (ntpts[..., 0] - verts[:, 0])) # noqa: E128
winding_number = ((upward & (is_left > 0)).sum(axis=0) winding_number = ((upward & (is_left > 0)).sum(axis=0)
- (downward & (is_left < 0)).sum(axis=0)) - (downward & (is_left < 0)).sum(axis=0)) # noqa: E128
nontrivial_inside = winding_number != 0 # filter nontrivial points based on winding number nontrivial_inside = winding_number != 0 # filter nontrivial points based on winding number
if include_boundary: if include_boundary:
@ -113,5 +113,3 @@ def poly_contains_points(
inside = nontrivial.copy() inside = nontrivial.copy()
inside[nontrivial] = nontrivial_inside inside[nontrivial] = nontrivial_inside
return inside return inside
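
The vectorized test above is the standard winding-number algorithm: upward edges with the point to their left add one, downward edges with the point to their right subtract one, and a nonzero total means the point is inside. A compact single-point version of the same test, assuming a simple polygon given as an Nx2 vertex list (boundary points are not handled specially here, unlike `include_boundary` above):

import numpy

def point_in_polygon(point, vertices) -> bool:
    verts = numpy.asarray(vertices, dtype=float)
    nxt = numpy.roll(verts, -1, axis=0)
    winding = 0
    for (x0, y0), (x1, y1) in zip(verts, nxt):
        # >0 if the point lies to the left of the directed edge (x0, y0) -> (x1, y1)
        is_left = (x1 - x0) * (point[1] - y0) - (y1 - y0) * (point[0] - x0)
        if y0 <= point[1] < y1 and is_left > 0:      # upward crossing, point to the left
            winding += 1
        elif y1 <= point[1] < y0 and is_left < 0:    # downward crossing, point to the right
            winding -= 1
    return winding != 0

square = [(0, 0), (2, 0), (2, 2), (0, 2)]
print(point_in_polygon((1, 1), square))   # True
print(point_in_polygon((3, 1), square))   # False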

View File

@ -52,9 +52,8 @@ path = "masque/__init__.py"
[project.optional-dependencies] [project.optional-dependencies]
oasis = ["fatamorgana~=0.11"] oasis = ["fatamorgana~=0.11"]
dxf = ["ezdxf"] dxf = ["ezdxf~=1.0.2"]
svg = ["svgwrite"] svg = ["svgwrite"]
visualize = ["matplotlib"] visualize = ["matplotlib"]
text = ["matplotlib", "freetype-py"] text = ["matplotlib", "freetype-py"]
python-gdsii = ["python-gdsii"]