Back to home page

EIC code displayed by LXR

 
 

    


File indexing completed on 2026-04-10 07:49:17

0001 #!/usr/bin/env python
0002 #
0003 # Copyright (c) 2019 Opticks Team. All Rights Reserved.
0004 #
0005 # This file is part of Opticks
0006 # (see https://bitbucket.org/simoncblyth/opticks).
0007 #
0008 # Licensed under the Apache License, Version 2.0 (the "License"); 
0009 # you may not use this file except in compliance with the License.  
0010 # You may obtain a copy of the License at
0011 #
0012 #   http://www.apache.org/licenses/LICENSE-2.0
0013 #
0014 # Unless required by applicable law or agreed to in writing, software 
0015 # distributed under the License is distributed on an "AS IS" BASIS, 
0016 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  
0017 # See the License for the specific language governing permissions and 
0018 # limitations under the License.
0019 #
0020 
0021 
0022 import logging, hashlib, sys, os
0023 import numpy as np
0024 np.set_printoptions(precision=2) 
0025 
0026 
0027 from opticks.ana.base import opticks_main, manual_mixin
0028 from opticks.ana.nbase import Buf
0029 
0030 from ddbase import Dddb
0031 from ddpart import Parts, ddpart_manual_mixin
0032 
0033 from geom import Part
0034 
0035 log = logging.getLogger(__name__)
0036 
0037 
0038 from opticks.analytic.treebase import Tree, Node
0039 
0040 
class NodePartitioner(object):
    """
    Methods defined here are grafted onto treebase.Node when the
    treepart_manual_mixin function below is invoked.
    """
    def parts(self):
        """
        Return the list of Part instances for this node's logical volume,
        tagging each part with a back-reference to this node.

        Divvy up geometry into parts that split "intersection" into
        union lists.  This boils down to judicious choice of bounding
        box according to intersects of the source geometry.

        The result is computed once and cached on the instance as
        ``self._parts``.
        """
        if not hasattr(self, '_parts'):
            node_parts = self.lv.parts()
            for part in node_parts:
                part.node = self
            self._parts = node_parts
        return self._parts

    def num_parts(self):
        """Return the number of parts belonging to this node."""
        return len(self.parts())
0065 
0066 
class TreePartitioner(object):
    """
    Methods defined here are grafted onto treebase.Tree when the
    treepart_manual_mixin function below is invoked.

    All methods are classmethods: the tree node registry is accessed
    via ``cls.num_nodes()`` and ``cls.get(i)`` which are provided by
    treebase.Tree itself.
    """
    @classmethod
    def num_parts(cls):
        """Return the total part count summed over all nodes of the tree."""
        tot = 0
        for i in range(cls.num_nodes()):
            tot += cls.get(i).num_parts()
        return tot

    @classmethod
    def parts(cls):
        """
        Collect the Part instances from every node of the tree into a
        single Parts list.

        Any per-node ``gcsg`` entries (CSG descriptions) are gathered
        too, each tagged with a back-reference to its node, and attached
        to the returned Parts instance as ``pts.gcsg``.
        """
        tnodes = cls.num_nodes() 
        tparts = cls.num_parts() 
        log.info("tnodes %s tparts %s " % (tnodes, tparts))

        pts = Parts()
        gcsg = []

        for i in range(tnodes):
            node = cls.get(i)

            log.debug("tree.parts node %s parent %s" % (repr(node),repr(node.parent)))
            log.info("tree.parts node.lv %s " % (repr(node.lv)))
            log.info("tree.parts node.pv %s " % (repr(node.pv)))

            npts = node.parts()
            pts.extend(npts)

            if hasattr(npts, 'gcsg') and len(npts.gcsg) > 0:
                for c in npts.gcsg:
                    c.node = node
                gcsg.extend(npts.gcsg)

        assert len(pts) == tparts
        pts.gcsg = gcsg
        return pts

    @classmethod
    def convert(cls, parts, explode=0.):
        """
        :param parts: array of parts, essentially just a list of part instances
        :param explode: when positive, offset each part in x by index*explode (debug visualisation)
        :return: np.array buffer of parts

        Tree.convert

        #. collect Part instances from each of the nodes into list
        #. serialize parts into array, converting relationships into indices
        #. this cannot live at lower level as serialization demands to 
           allocate all at once and fill in the content, also conversion
           of relationships to indices demands an all at once conversion

        Five solids of DYB PMT represented in part buffer

        * part.typecode 1:sphere, 2:tubs
        * part.flags, only 1 for tubs
        * part.node.index 0,1,2,3,4  (0:4pt,1:4pt,2:2pt,3:1pt,4:1pt) 

        ::

            In [19]: p.buf.view(np.int32)[:,(1,2,3),3]
            Out[19]: 
              Buf([[0, 1, 0],       part.flags, part.typecode, nodeindex    
                   [0, 1, 0],
                   [0, 1, 0],
                   [1, 2, 0],

                   [0, 1, 1],
                   [0, 1, 1],
                   [0, 1, 1],
                   [1, 2, 1],

                   [0, 1, 2],
                   [0, 1, 2],

                   [0, 1, 3],

                   [0, 2, 4]], dtype=int32)


            In [22]: p.buf.view(np.int32)[:,1,1]     # 1-based part index
            Out[22]: Buf([ 1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12], dtype=int32)


        * where are the typecodes hailing from, not using OpticksCSG.h enum ?
          nope hardcoded into geom.py Part.__init__  Sphere:1, Tubs:2 Box:3

        """
        data = np.zeros([len(parts),4,4],dtype=np.float32)
        for i,part in enumerate(parts):
            data[i] = part.as_quads()

            data[i].view(np.int32)[1,1] = i + 1           # 1-based part index, where parent 0 means None
            data[i].view(np.int32)[1,2] = 0               # set to boundary index in C++ ggeo-/GPmt
            data[i].view(np.int32)[1,3] = part.flags      # used in intersect_ztubs
            data[i].view(np.int32)[2,3] = part.typecode   # bbmin.w : typecode 
            data[i].view(np.int32)[3,3] = part.node.index # bbmax.w : solid index  

            if explode > 0:
                # shift origin and bbox x coordinates, leaving row 1 (indices) untouched
                dx = i*explode
                data[i][0,0] += dx
                data[i][2,0] += dx
                data[i][3,0] += dx

        buf = data.view(Buf)
        # NB list comprehensions, not map(): under Python 3 map() returns a
        # lazy one-shot iterator, so storing it as a buffer attribute would
        # leave boundaries/materials/lvnames unusable after a single pass
        # (and pvnames below would exhaust lvnames while building itself).
        buf.boundaries = [p.boundary for p in parts]

        if hasattr(parts, "gcsg"):
            buf.gcsg = parts.gcsg
            csg_with_lv = [cn for cn in buf.gcsg if cn.lv is not None]
            buf.materials = [cn.lv.material for cn in csg_with_lv]
            buf.lvnames = [cn.lv.name for cn in csg_with_lv]
            buf.pvnames = [lvn.replace('lv','pv') for lvn in buf.lvnames]
        return buf
0192 
0193 
0194 
def treepart_manual_mixin():
    """
    Graft the partitioner methods onto the treebase classes.

    The manual mixin approach avoids changing the class hierarchy
    while still keeping base functionality separate from the
    partitioner methods.
    """
    for target, mixin in ((Node, NodePartitioner), (Tree, TreePartitioner)):
        manual_mixin(target, mixin)
0203 
0204 
0205 
if __name__ == '__main__':

    args = opticks_main()

    # graft partitioner methods onto the geometry and tree classes
    ddpart_manual_mixin()    # Tubs, Sphere, Elem and Primitive
    treepart_manual_mixin()  # Node and Tree

    # parse the detector description and pick out the PMT logical volume
    dddb = Dddb.parse(args.apmtddpath)
    pmt_lv = dddb.logvol_("lvPmtHemi")

    # build the volume tree, partition it and serialize to a part buffer
    tree = Tree(pmt_lv)
    pmt_parts = tree.parts()
    pmt_buf = tree.convert(pmt_parts)

    log.warning("use analytic so save the PMT, this is just for testing tree conversion")
0224 
0225