Commit 948c3ebfa4f3ddb8b09485014696d917bfa02a4d

Authored by Tyler Koyamatsu
0 parents
Exists in master

tif2las

Showing 3 changed files with 638 additions and 0 deletions

tif2las.py View file @ 948c3eb
@@ -0,0 +1,506 @@
  1 +import time
  2 +import concurrent.futures
  3 +from osgeo import gdal
  4 +import os
  5 +import shutil
  6 +import glob
  7 +import pandas as pd
  8 +import pdal
  9 +import multiprocessing
  10 +import sys
  11 +import argparse
  12 +import re
  13 +import numpy as np
  14 +import pydicom as dicom
  15 +import subprocess
  16 +
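  + # NOTE (added annotation): the assignments below are module-level placeholders;
  + # arguments() re-binds most of them through its `global` statement so the
  + # multiprocessing entry point at the bottom of the file can read them.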
  17 +output12 = 0
  18 +output22 = 0
  19 +output32 = 0
  20 +img_names2 = []
  21 +z_step2 = 0
  22 +mode2 = 0
  23 +input2 = 0
  24 +cores2 = 0
  25 +v22 = 0
  26 +v12 = 0
  27 +s22 = 0
  28 +s12 = 0
  29 +h22 = 0
  30 +h12 = 0
  31 +grey_scale_value2 = 0
  32 +process_name2 = 0
  33 +#Arguments
  34 +def arguments():
  35 + print('starting up')
  36 + parser = argparse.ArgumentParser()
  37 + global output12,output22,color,mode2,output32,img_names2,z_step2,input2,cores2,v22,v12,s22,s12,h22,h12,grey_scale_value_lower2,grey_scale_value_upper2,process_name2,keeptif,dcm2tif,scale_x,scale_y,scale_z,NumFrame,classnumb
  38 + parser.add_argument('-i','--input',required=True, help='(REQUIRED) input folder')
  39 + parser.add_argument('-o','--output',required=True, help='(REQUIRED) output folder')
  40 + parser.add_argument('-r','--rgb', action="store_true", default = False,help='rgb image')
  41 + parser.add_argument('-c','--cores', default=4, type=int, help='how many cpu cores you want to dedicate')
  42 + parser.add_argument('--gsupper', type=int, help='grayscale cutoff value, upper limit')
  43 + parser.add_argument('--gslower', type=int, help='grayscale cutoff value, lower limit')
  44 + parser.add_argument('--r1', default=0, type=int, help='bottom red value for RGB filtering')
  45 + parser.add_argument('--r2', default=0, type=int, help='top red value for RGB filtering')
  46 + parser.add_argument('--b1', default=0, type=int, help='bottom blue value for RGB filtering')
  47 + parser.add_argument('--b2', default=0, type=int, help='top blue value for RGB filtering')
  48 + parser.add_argument('--g1', default=0, type=int, help='bottom green for RGB filtering')
  49 + parser.add_argument('--g2', default=0, type=int, help='top green for RGB filtering')
  50 + parser.add_argument('--scale_x', default=0, type=float, help='x scale for pdal writers.las; 0 (default) means use the DICOM PixelSpacing when available')
  51 + parser.add_argument('--scale_y', default=0, type=float, help='y scale for pdal writers.las; 0 (default) means use the DICOM PixelSpacing when available')
  52 + parser.add_argument('--scale_z', default=0.01, type=float, help='scale of z from pdal writers.las options, default .01')
  53 + parser.add_argument('--z_step', default=1, type=float, help='Z step from image to image')
  54 + parser.add_argument('--keeptif', default=False, action="store_true", help='when using dicom2laz keep converted tif')
  55 + parser.add_argument('--dcm2tif', default=False, action="store_true", help='dicom 2 tif')
  56 + parser.add_argument('-n', '--classnumb', default=0, type=int, help='Total number of classifications to add')
  57 + args = vars(parser.parse_args())
  58 + color = args['rgb']
  59 + classnumb = args['classnumb']
  60 + classnumb = int(classnumb)
  61 + grey_scale_value_lower2 = args['gslower']
  62 + grey_scale_value_upper2 = args['gsupper']
  63 + h12 = args['r1']
  64 + h22 = args['r2']
  65 + s12 = args['b1']
  66 + s22 = args['b2']
  67 + v12 = args['g1']
  68 + v22 = args['g2']
  69 + cores2 = args['cores']
  70 + input2 = args['input']
  71 + output12 = args['output']
  72 + z_step2 = args['z_step']
  73 + keeptif = args['keeptif']
  74 + dcm2tif = args['dcm2tif']
  75 + scale_x = args['scale_x']
  76 + scale_y = args['scale_y']
  77 + scale_z = args['scale_z']
  78 + for images in glob.glob(input2 + '/*'):
  79 + img_names2.append(images)
  80 + output22 = output12 + '/'
  81 + output32 = re.escape(output12)
  82 + directory, images2 = os.path.split(images)
  83 + fileName, fileExt = os.path.splitext(images2)
  84 + # scale_x still holds the CLI value here; DICOM PixelSpacing may override it below
  85 + if str(fileExt) == '.dcm':
  86 + mode2 = 'dcm'
  87 + if mode2 == 'dcm':
  88 + ds = dicom.dcmread((img_names2[0]), force=True)
  89 + try:
  90 + scale = ds.PixelSpacing
  91 + scale_x = scale[0]
  92 + scale_y = scale[1]
  93 + except AttributeError:
  94 + scale_x = input('No Pixel Spacing found, please enter preferred x scale (Default 1):')
  95 + scale_y = input('No Pixel Spacing found, please enter preferred y scale (Default 1):')
  96 + scale_x = float(scale_x)
  97 + scale_y = float(scale_y)
  98 + pass
  99 + if dcm2tif != True:
  100 + if not os.path.exists(output12):
  101 + os.mkdir(output12)
  102 + if not os.path.exists(output22 + 'xyz'):
  103 + os.mkdir(output22 + 'xyz')
  104 + if not os.path.exists(output22 + 'csv'):
  105 + os.mkdir(output22 + 'csv')
  106 + if not os.path.exists(output22 + 'las'):
  107 + os.mkdir(output22 + 'las')
  108 + if not os.path.exists(output22 + 'img'):
  109 + os.mkdir(output22 + 'img')
  110 + return output12,output22,output32,img_names2,z_step2,color,input2,cores2,v22,v12,s22,s12,h22,h12,grey_scale_value_lower2,grey_scale_value_upper2,process_name2,dcm2tif,keeptif,scale_x,scale_y,scale_z,mode2,classnumb
  111 +
  112 +
  113 +def pdalinsert(output4, filename):
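  + # Builds the PDAL pipeline JSON for one slice: read the intermediate
  + # space-delimited CSV with readers.text, write a compressed .laz with writers.las.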
  114 + json1 = """
  115 + [
  116 + {{
  117 + "type":"readers.text",
  118 + "filename":"{file}/csv/{name}.csv"
  119 + }},
  120 + {{
  121 + "type":"writers.las",
  122 + "filename":"{file}/las/{name}.laz"
  123 + }}
  124 + ]
  125 + """
  126 + json2 = json1.format(file=output4, name=filename)
  127 + return json2
  128 +
  129 +#Greyscale
  130 +def grayscale(input):
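  + # One worker call per image: GDAL translates the raster to XYZ, a constant Z is
  + # inserted from the image's position in the stack, the intensity column is
  + # optionally clipped to [gslower, gsupper], and PDAL writes the slice as .laz.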
  131 + img_name = input[0]
  132 + output2 = input[1][0]
  133 + z_step=input[1][1]
  134 + mode=input[1][2]
  135 + grey_scale_value_lower=input[1][9]
  136 + img_names=input[1][10]
  137 + scale_x = input[1][13]
  138 + scale_y = input[1][14]
  139 + scale_z = input[1][15]
  140 + grey_scale_value_upper= input[1][16]
  141 + output4 = output2.replace(os.sep,'/')
  142 + z_count2 = img_names.index(img_name)
  143 + z_count = (z_step * z_count2) - (z_step - 1)
  144 + ds = gdal.Open(img_name)
  145 + directory, img_name2 = os.path.split(img_name)
  146 + fileName, fileExt = os.path.splitext(img_name2)
  147 + print('Processing ' + fileName)
  148 + out_ds = gdal.Translate(output2 + 'xyz/' + fileName + '.xyz', ds, format='XYZ')
  149 + ds = None
  150 + out_ds = None
  151 + pd1 = pd.read_csv(output2 + 'xyz/' + fileName + '.xyz', sep=' ')
  152 + pd1.columns = ['X', 'Y', 'intensity']
  153 + pd1.insert(loc=2, column='Z', value=z_count)
  154 + if grey_scale_value_lower is not None:
  155 + pd1 = pd1[pd1.intensity >= grey_scale_value_lower]
  156 + if grey_scale_value_upper is not None:
  157 + pd1 = pd1[pd1.intensity <= grey_scale_value_upper]
  158 + pd1.to_csv(output2 + 'csv/' + fileName + '.csv', sep=' ', index=False)
  159 + json = pdalinsert(output4,fileName)
  160 + pipeline = pdal.Pipeline(json)
  161 + p = pipeline.execute()
  162 + pd1 = None
  163 + os.remove(output2 + 'xyz/' + fileName + '.xyz')
  164 + os.remove(output2 + 'csv/' + fileName + '.csv')
  165 + print(f'{fileName} was processed')
  166 +#Color
  167 +def rgb(input):
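  + # RGB worker: each band is dumped to its own XYZ file, merged into one dataframe,
  + # pure-black pixels are dropped, and points whose values fall inside the
  + # user-supplied r/g/b ranges are removed before PDAL writes the .laz slice.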
  168 + img_name = input[0]
  169 + output2 = input[1][0]
  170 + z_step=input[1][1]
  171 + mode=input[1][2]
  172 + img_names=input[1][10]
  173 + r2=input[1][7]   # --r2 (args[7] in main's argument list)
  174 + r1=input[1][8]   # --r1
  175 + b2=input[1][5]   # --b2
  176 + b1=input[1][6]   # --b1
  177 + g2=input[1][3]   # --g2
  178 + g1=input[1][4]   # --g1
  179 + scale_x = input[1][13]
  180 + scale_y = input[1][14]
  181 + scale_z = input[1][15]
  182 + output4 = output2.replace(os.sep,'/')
  183 + z_count2 = img_names.index(img_name)
  184 + z_count = (z_step * z_count2) - (z_step - 1)
  185 + directory, img_name2 = os.path.split(img_name)
  186 + fileName, fileExt = os.path.splitext(img_name2)
  187 + print('Processing ' + fileName)
  188 + ds = gdal.Open(img_name)
  189 + out_ds = gdal.Translate(output2 + 'xyz/' + fileName + 'r' + '.xyz', ds, format='XYZ', bandList=[1])
  190 + out_dss = gdal.Translate(output2 + 'xyz/' + fileName + 'g' + '.xyz', ds, format='XYZ', bandList=[2])
  191 + out_dsss = gdal.Translate(output2 + 'xyz/' + fileName + 'b' + '.xyz', ds, format='XYZ', bandList=[3])
  192 + pd1 = pd.read_csv(output2 + 'xyz/' + fileName + 'r' + '.xyz', sep=' ')
  193 + pd2 = pd.read_csv(output2 + 'xyz/' + fileName + 'g' + '.xyz', sep=' ')
  194 + pd3 = pd.read_csv(output2 + 'xyz/' + fileName + 'b' + '.xyz', sep=' ')
  195 + pd1.columns = ['X', 'Y', 'red']
  196 + pd2.columns = ['X', 'Y', 'green']
  197 + pd3.columns = ['X', 'Y', 'blue']
  198 + pd1.insert(loc=2, column='Z', value=z_count)
  199 + pd1['green'] = pd2['green']
  200 + pd1['blue'] = pd3['blue']
  201 + pd6 = pd1.loc[(pd1['red'] == 0) & (pd1['green'] == 0) & (pd1['blue'] == 0)]
  202 + pd1 = pd1[~pd1.isin(pd6)].dropna()
  203 + if ((g1 == 0) & (g2 == 0) & (b1 == 0) & (b2 == 0)):
  204 + pd7 = pd1.loc[(pd1['red']>= r1) & (pd1['red']<= r2)]
  205 + elif ((r1 == 0) & (r2 == 0) & (b1 == 0) & (b2 ==0)):
  206 + pd7 = pd1.loc[(pd1['green']>= g1) & (pd1['green']<= g2)]
  207 + elif ((r1 == 0) & (r2 == 0) & (g1 == 0) & (g2 == 0)):
  208 + pd7 = pd1.loc[(pd1['blue']>= b1) & (pd1['blue']<= b2)]
  209 + elif ((g1 == 0) & (g2 == 0)):
  210 + pd7 = pd1.loc[(pd1['red']>= r1) & (pd1['red']<= r2) & (pd1['blue']>= b1) & (pd1['blue']<= b2)]
  211 + elif ((b1 == 0) & (b2 == 0)):
  212 + pd7 = pd1.loc[(pd1['red']>= r1) & (pd1['red']<= r2) & (pd1['green']>= g1) & (pd1['green']<= g2)]
  213 + elif ((r1 == 0) & (r2 == 0)):
  214 + pd7 = pd1.loc[(pd1['blue']>= b1) & (pd1['blue']<= b2) & (pd1['green']>= g1) & (pd1['green']<= g2)]
  215 + else:
  216 + pd7 = pd1.loc[(pd1['red']>= r1) & (pd1['red']<= r2) & (pd1['green']>= g1) & (pd1['green']<= g2) & (pd1['blue']>= b1) & (pd1['blue']<= b2)]
  217 + pd1 = pd1[~pd1.isin(pd7)].dropna()
  218 + pd1.to_csv(output2 + 'csv/' + fileName + '.csv', sep=' ', index=False)
  219 + json = pdalinsert(output4,fileName)
  220 + pipeline = pdal.Pipeline(json)
  221 + p = pipeline.execute()
  222 + img = None
  223 + hsv = None
  224 + mask = None
  225 + inv_mask = None
  226 + res = None
  227 + pdl = None
  228 + pd1 = None
  229 + pd2 = None
  230 + pd3 = None
  231 + ds = None
  232 + out_ds = None
  233 + out_dss = None
  234 + out_dsss = None
  235 + os.remove(output2 + 'xyz/' + fileName + 'r' + '.xyz')
  236 + os.remove(output2 + 'xyz/' + fileName + 'g' + '.xyz')
  237 + os.remove(output2 + 'xyz/' + fileName + 'b' + '.xyz')
  238 + os.remove(output2 + 'csv/' + fileName + '.csv')
  239 + print(f'{fileName} was processed')
  240 +#Dicom
  241 +def dicom2laz(input):
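  + # DICOM worker: the pixel array is normalized and written out as GeoTIFF
  + # (one file per frame for multi-frame series); unless --dcm2tif was given,
  + # the single-frame TIFF then goes through the same XYZ -> CSV -> LAZ path,
  + # with X/Y scaled by PixelSpacing (or the --scale_x/--scale_y overrides).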
  242 + img_name = input[0]
  243 + output2 = input[1][0]
  244 + z_step=input[1][1]
  245 + grey_scale_value_lower=input[1][9]
  246 + img_names=input[1][10]
  247 + keeptif = input[1][11]
  248 + dcm2tif = input[1][12]
  249 + scale_x = input[1][13]
  250 + scale_y = input[1][14]
  251 + scale_z = input[1][15]
  252 + grey_scale_value_upper = input[1][16]
  253 + directory, img_name2 = os.path.split(img_name)
  254 + fileName, fileExt = os.path.splitext(img_name2)
  255 + print('Processing ' + fileName)
  256 + z_count2 = img_names.index(img_name)
  257 + z_count = (z_step * z_count2) - (z_step - 1)
  258 + ds = dicom.dcmread((img_name),force=True)
  259 + arr = ds.pixel_array
  260 + if scale_x == 0:
  261 + scale = ds.PixelSpacing
  262 + scale_x = scale[0]
  263 + scale_y = scale[1]
  264 + norm = np.linalg.norm(arr)
  265 + norm_arr = arr / norm
  266 + to_tiff = norm_arr * 65536
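  + # norm is the L2 norm of the whole pixel array, so this keeps relative
  + # intensities and stretches them toward the 16-bit range for the GeoTIFF round trip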
  267 + try:
  268 + NumFrame = ds.NumberOfFrames
  269 + except AttributeError:
  270 + print('Number of Frames not detected, defaulting to 1')
  271 + NumFrame = 1
  272 + pass
  273 + if NumFrame == 1:
  274 + m, n = to_tiff.shape
  275 + R, C = np.mgrid[:m, :n]
  276 + out = np.column_stack((C.ravel(), R.ravel(), to_tiff.ravel()))
  277 + np.savetxt(output2 + fileName + '.xyz', out)
  278 + ds = gdal.Open(output2 + fileName + '.xyz')
  279 + gdal.Translate(output2 + fileName + '.tif', ds, format='Gtiff')
  280 + ds = None
  281 + arr = None
  282 + norm = None
  283 + norm_arr = None
  284 + to_tiff = None
  285 + m = None
  286 + n = None
  287 + R = None
  288 + C = None
  289 + out = None
  290 + ds = None
  291 + os.remove(output2 + fileName + '.xyz')
  292 + else:
  293 + for FrameNum in range(NumFrame):
  294 + tempvar = to_tiff[FrameNum]
  295 + m, n = tempvar.shape
  296 + R, C = np.mgrid[:m, :n]
  297 + out = np.column_stack((C.ravel(), R.ravel(), tempvar.ravel()))
  298 + # write this frame to its own .xyz and convert it to a GeoTIFF
  299 + FrameStr = str(FrameNum)
  300 + np.savetxt(output2 + fileName + '_' + FrameStr + '.xyz', out)
  301 + ds = gdal.Open(output2 + fileName + '_' + FrameStr + '.xyz')
  302 + gdal.Translate(output2 + fileName + '_' + FrameStr + '.tif',ds,format='Gtiff')
  303 + tempvar = None
  304 + ds = None
  305 + arr = None
  306 + norm = None
  307 + norm_arr = None
  308 + m = None
  309 + n = None
  310 + R = None
  311 + C = None
  312 + out = None
  313 + ds = None
  314 + os.remove(output2 + fileName + '_' + FrameStr + '.xyz')
  315 + if dcm2tif == True:
  316 + print(f'{fileName} was processed')
  317 + return
  318 + img_name = output2 + fileName + '.tif'
  319 + output4 = output2.replace(os.sep,'/')
  320 + ds = gdal.Open(img_name)
  321 + directory, img_name2 = os.path.split(img_name)
  322 + fileName, fileExt = os.path.splitext(img_name2)
  323 + out_ds = gdal.Translate(output2 + 'xyz/' + fileName + '.xyz', ds, format='XYZ')
  324 + ds = None
  325 + out_ds = None
  326 + pd1 = pd.read_csv(output2 + 'xyz/' + fileName + '.xyz', sep=' ')
  327 + pd1.columns = ['X', 'Y', 'intensity']
  328 + pd1.insert(loc=2, column='Z', value=z_count)
  329 + if grey_scale_value_lower is not None:
  330 + pd1 = pd1[pd1.intensity >= grey_scale_value_lower]
  331 + if grey_scale_value_upper is not None:
  332 + pd1 = pd1[pd1.intensity <= grey_scale_value_upper]
  333 + pd1.X = pd1.X*scale_x
  334 + pd1.Y = pd1.Y*scale_y
  335 + pd1.to_csv(output2 + 'csv/' + fileName + '.csv', sep=' ', index=False)
  336 + json = pdalinsert(output4,fileName)
  337 + pipeline = pdal.Pipeline(json)
  338 + p = pipeline.execute()
  339 + pd1 = None
  340 + os.remove(output2 + 'xyz/' + fileName + '.xyz')
  341 + os.remove(output2 + 'csv/' + fileName + '.csv')
  342 + if keeptif == True:
  343 + print(f'{fileName} was processed')
  344 + return
  345 + os.remove(output2 + fileName + '.tif')
  346 + print(f'{fileName} was processed ' + str(z_step) + ' ' + str(scale_x) + ' ' + str(scale_y))
  347 +
  348 +#Classify
  349 +def classification(output,classnumb):
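  + # Interactive pass over the finished .laz files: the Classification dimension
  + # is created (filters.ferry) and zeroed, then each user-defined class is
  + # assigned with filters.assign wherever the chosen dimension falls between the
  + # prompted lower/upper bounds (either bound may be skipped by answering 'n').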
  350 + inp = output + '/las'
  351 + classnumb = classnumb
  352 + print(inp)
  353 + jsonclassify = """
  354 + [
  355 + {{
  356 + "type":"readers.las",
  357 + "filename":"{directory}/{name}"
  358 + }},
  359 + {{
  360 + "type":"filters.ferry",
  361 + "dimensions":"=>Classification"
  362 +
  363 + }},
  364 + {{
  365 + "type":"filters.assign",
  366 + "assignment":"Classification[:]=0"
  367 +
  368 + }},
  369 + {{
  370 + "type":"writers.las",
  371 + "filename":"{directory}/{name}"
  372 + }}
  373 + ]
  374 + """
  375 +
  376 + json = """
  377 + [
  378 + {{
  379 + "type":"readers.las",
  380 + "filename":"{directory}/{name}"
  381 + }},
  382 + {{
  383 + "type":"filters.assign",
  384 + "value":"Classification = {classification} WHERE {classtype} > {lowclass} && {classtype} < {uppclass}"
  385 +
  386 + }},
  387 + {{
  388 + "type":"writers.las",
  389 + "filename":"{directory}/{name}"
  390 + }}
  391 + ]
  392 + """
  393 +
  394 + jsonnullup = """
  395 + [
  396 + {{
  397 + "type":"readers.las",
  398 + "filename":"{directory}/{name}"
  399 + }},
  400 + {{
  401 + "type":"filters.assign",
  402 + "value":"Classification = {classification}",
  403 + "where": "{classtype} > {lowclass}"
  404 +
  405 + }},
  406 + {{
  407 + "type":"writers.las",
  408 + "filename":"{directory}/{name}"
  409 + }}
  410 + ]
  411 + """
  412 +
  413 + jsonnulllow = """
  414 + [
  415 + {{
  416 + "type":"readers.las",
  417 + "filename":"{directory}/{name}"
  418 + }},
  419 + {{
  420 + "type":"filters.assign",
  421 + "value":"Classification = {classification}",
  422 + "where": "{classtype} < {uppclass}"
  423 +
  424 + }},
  425 + {{
  426 + "type":"writers.las",
  427 + "filename":"{directory}/{name}"
  428 + }}
  429 + ]
  430 + """
  431 + filename = os.path.basename(inp)
  432 + classtype = []
  433 + lowup = []
  434 + classnumb = classnumb
  435 + for numb in range(classnumb):
  436 + string = 'Class ' + str(numb + 1) + ': Classify by Intensity? [y/n]'
  437 + answer = input(string)
  438 + if not answer or answer[0].lower() != 'y':
  439 + classtype.append(input('Type Class Identifier:'))
  440 + classtype[numb] = str(classtype[numb])
  441 + else:
  442 + classtype.append('Intensity')
  443 + string = 'Class ' + str(numb + 1) + ': Lower ' + classtype[numb] + ' value (enter [n] if DNE):'
  444 + answer1 = input(string)
  445 + if answer1 == 'n':
  446 + answer1 = None
  447 + string = 'Class ' + str(numb + 1) + ': Upper ' + classtype[numb] + ' value (enter [n] if DNE):'
  448 + answer2 = input(string)
  449 + if answer2 == 'n':
  450 + answer2 = None
  451 + lowup.append([answer1, answer2])
  452 + print(answer1)
  453 + print(answer2)
  454 + for filenames in glob.glob(os.path.join(inp, '*.laz')):
  455 + directory, name2 = os.path.split(filenames)
  456 + json1 = jsonclassify.format(directory=directory, name=name2)
  457 + json2 = json1.replace('\\', '/')
  458 + pipeline = pdal.Pipeline(json2)
  459 + p = pipeline.execute()
  460 + for numb in range(classnumb):
  461 + if lowup[numb][0] is None:
  462 + json1 = jsonnulllow.format(directory=directory, name=name2, uppclass=int(lowup[numb][1]),
  463 + classtype=classtype[numb], classification=(numb + 1))
  464 + json2 = json1.replace('\\', '/')
  465 + pipeline = pdal.Pipeline(json2)
  466 + p = pipeline.execute()
  467 + elif lowup[numb][1] is None:
  468 + json1 = jsonnullup.format(directory=directory, name=name2, lowclass=int(lowup[numb][0]),
  469 + classtype=classtype[numb], classification=(numb + 1))
  470 + json2 = json1.replace('\\', '/')
  471 + pipeline = pdal.Pipeline(json2)
  472 + p = pipeline.execute()
  473 + else:
  474 + json1 = json.format(directory=directory, name=name2, lowclass=int(lowup[numb][0]),
  475 + uppclass=int(lowup[numb][1]), classtype=classtype[numb], classification=(numb + 1))
  476 + json2 = json1.replace('\\', '/')
  477 + pipeline = pdal.Pipeline(json2)
  478 + p = pipeline.execute()
  479 + print(name2 + ' has been classified')
  480 +
  481 +
  482 +#command line options
  483 +#rgb or grayscale in pool.map
  484 +def main(img_names3,output23,z_step3,mode3,v23,v13,s23,s13,h23,h13,grey_scale_value_lower3,keeptif2,dcm2tif2,scale_x,scale_y,scale_z,grey_scale_value_upper3):
  485 + iterable = img_names3
  486 + args = [output23,z_step3,mode3,v23,v13,s23,s13,h23,h13,grey_scale_value_lower3,img_names3,keeptif2,dcm2tif2,scale_x,scale_y,scale_z,grey_scale_value_upper3]
  487 + new_iterable=([x,args] for x in iterable)
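  + # Pick the worker: rgb for --rgb input, dicom2laz for .dcm input or --dcm2tif,
  + # grayscale otherwise; pool.map fans the image list out across --cores processes.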
  488 + if color == True:
  489 + process_name_2 = rgb
  490 + else:
  491 + process_name_2 = grayscale
  492 + if mode2 == 'dcm':
  493 + process_name_2 = dicom2laz
  494 + if dcm2tif2 == True:
  495 + process_name_2 = dicom2laz
  496 + with multiprocessing.Pool(processes=cores2) as pool:
  497 + result = pool.map(process_name_2,new_iterable)
  498 +
  499 +if __name__=='__main__':
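  + # freeze_support() keeps multiprocessing working when the script is frozen
  + # into a Windows executable (see tif2las.spec).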
  500 + multiprocessing.freeze_support()
  501 + arguments()
  502 + main(img_names2,output22,z_step2,mode2,v22,v12,s22,s12,h22,h12,grey_scale_value_lower2,keeptif,dcm2tif,scale_x,scale_y,scale_z,grey_scale_value_upper2)
  503 + if not classnumb == 0:
  504 + classification(output22,classnumb)
  505 +
  506 +
tif2las.spec View file @ 948c3eb
@@ -0,0 +1,33 @@
  1 +# -*- mode: python ; coding: utf-8 -*-
  2 +
  3 +block_cipher = None
  4 +
  5 +
  6 +a = Analysis(['tif2las.py'],
  7 + pathex=['C:\\Users\\tyler\\OneDrive\\Desktop\\Current'],
  8 + binaries=[],
  9 + datas=[],
  10 + hiddenimports=[],
  11 + hookspath=[],
  12 + runtime_hooks=[],
  13 + excludes=[],
  14 + win_no_prefer_redirects=False,
  15 + win_private_assemblies=False,
  16 + cipher=block_cipher,
  17 + noarchive=False)
  18 +pyz = PYZ(a.pure, a.zipped_data,
  19 + cipher=block_cipher)
  20 +exe = EXE(pyz,
  21 + a.scripts,
  22 + a.binaries,
  23 + a.zipfiles,
  24 + a.datas,
  25 + [],
  26 + name='tif2las',
  27 + debug=False,
  28 + bootloader_ignore_signals=False,
  29 + strip=False,
  30 + upx=True,
  31 + upx_exclude=[],
  32 + runtime_tmpdir=None,
  33 + console=True )
tif2lascondaenv.txt View file @ 948c3eb
@@ -0,0 +1,99 @@
  1 +# This file may be used to create an environment using:
  2 +# $ conda create --name <env> --file <this file>
  3 +# platform: win-64
  4 +@EXPLICIT
  5 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2020d-h516909a_0.tar.bz2
  6 +https://repo.anaconda.com/pkgs/main/win-64/blas-1.0-mkl.conda
  7 +https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2020.12.5-h5b45459_0.tar.bz2
  8 +https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2020.3-h57928b3_311.tar.bz2
  9 +https://repo.anaconda.com/pkgs/msys2/win-64/msys2-conda-epoch-20160418-1.tar.bz2
  10 +https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.10-0.tar.bz2
  11 +https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.16.27012-h30e32a0_2.tar.bz2
  12 +https://repo.anaconda.com/pkgs/msys2/win-64/m2w64-gmp-6.1.0-2.tar.bz2
  13 +https://repo.anaconda.com/pkgs/msys2/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
  14 +https://repo.anaconda.com/pkgs/main/win-64/mkl-2020.2-256.conda
  15 +https://conda.anaconda.org/conda-forge/win-64/vc-14.1-h869be7e_1.tar.bz2
  16 +https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-he774522_3.tar.bz2
  17 +https://conda.anaconda.org/conda-forge/win-64/cfitsio-3.470-h0af3d06_7.tar.bz2
  18 +https://conda.anaconda.org/conda-forge/win-64/expat-2.2.9-h33f27b4_2.tar.bz2
  19 +https://conda.anaconda.org/conda-forge/win-64/freeglut-3.0.0-h6538335_1005.tar.bz2
  20 +https://conda.anaconda.org/conda-forge/win-64/geos-3.8.1-he025d50_0.tar.bz2
  21 +https://conda.anaconda.org/conda-forge/win-64/icu-67.1-h33f27b4_0.tar.bz2
  22 +https://conda.anaconda.org/conda-forge/win-64/jpeg-9d-h8ffe710_0.tar.bz2
  23 +https://conda.anaconda.org/conda-forge/win-64/jsoncpp-1.8.4-h1ad3211_1002.tar.bz2
  24 +https://conda.anaconda.org/conda-forge/win-64/laszip-3.4.3-h6538335_1.tar.bz2
  25 +https://conda.anaconda.org/conda-forge/win-64/laz-perf-1.4.4-ha925a31_0.tar.bz2
  26 +https://conda.anaconda.org/conda-forge/win-64/libblas-3.8.0-20_mkl.tar.bz2
  27 +https://conda.anaconda.org/conda-forge/win-64/libclang-10.0.1-default_hf44288c_1.tar.bz2
  28 +https://conda.anaconda.org/conda-forge/win-64/libiconv-1.16-he774522_0.tar.bz2
  29 +https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.1.0-h8ffe710_3.tar.bz2
  30 +https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.2-h62dcd97_2.tar.bz2
  31 +https://repo.anaconda.com/pkgs/msys2/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
  32 +https://conda.anaconda.org/conda-forge/win-64/nitro-2.7.dev6-h33f27b4_3.tar.bz2
  33 +https://conda.anaconda.org/conda-forge/win-64/openssl-1.1.1h-he774522_0.tar.bz2
  34 +https://conda.anaconda.org/conda-forge/win-64/pcre-8.44-ha925a31_0.tar.bz2
  35 +https://conda.anaconda.org/conda-forge/win-64/pixman-0.40.0-h8ffe710_0.tar.bz2
  36 +https://conda.anaconda.org/conda-forge/win-64/sqlite-3.33.0-he774522_1.tar.bz2
  37 +https://conda.anaconda.org/conda-forge/win-64/tk-8.6.10-he774522_1.tar.bz2
  38 +https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.3-ha925a31_1.tar.bz2
  39 +https://conda.anaconda.org/conda-forge/win-64/xz-5.2.5-h62dcd97_1.tar.bz2
  40 +https://conda.anaconda.org/conda-forge/win-64/zlib-1.2.11-h62dcd97_1010.tar.bz2
  41 +https://conda.anaconda.org/conda-forge/win-64/freexl-1.0.5-hd288d7e_1002.tar.bz2
  42 +https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.13-hf8e6fe8_1003.tar.bz2
  43 +https://conda.anaconda.org/conda-forge/win-64/jasper-2.0.14-hdc05fd1_1.tar.bz2
  44 +https://conda.anaconda.org/conda-forge/win-64/krb5-1.17.1-hc04afaa_3.tar.bz2
  45 +https://conda.anaconda.org/conda-forge/win-64/libcblas-3.8.0-20_mkl.tar.bz2
  46 +https://conda.anaconda.org/conda-forge/win-64/liblapack-3.8.0-20_mkl.tar.bz2
  47 +https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.37-h1d00b33_2.tar.bz2
  48 +https://conda.anaconda.org/conda-forge/win-64/libssh2-1.9.0-hb06d900_5.tar.bz2
  49 +https://conda.anaconda.org/conda-forge/win-64/libxml2-2.9.10-h1006b36_2.tar.bz2
  50 +https://repo.anaconda.com/pkgs/msys2/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
  51 +https://repo.anaconda.com/pkgs/main/win-64/python-3.8.3-he1778fa_2.conda
  52 +https://conda.anaconda.org/conda-forge/win-64/zstd-1.4.5-h1f3a1b7_2.tar.bz2
  53 +https://conda.anaconda.org/conda-forge/win-64/boost-cpp-1.74.0-hd4e6614_0.tar.bz2
  54 +https://conda.anaconda.org/conda-forge/win-64/freetype-2.10.4-h546665d_0.tar.bz2
  55 +https://conda.anaconda.org/conda-forge/win-64/libcurl-7.71.1-h4b64cdc_8.tar.bz2
  56 +https://conda.anaconda.org/conda-forge/win-64/liblapacke-3.8.0-20_mkl.tar.bz2
  57 +https://conda.anaconda.org/conda-forge/win-64/libpq-12.3-hd9aa61d_2.tar.bz2
  58 +https://conda.anaconda.org/conda-forge/win-64/libtiff-4.1.0-hc10be44_6.tar.bz2
  59 +https://repo.anaconda.com/pkgs/msys2/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
  60 +https://conda.anaconda.org/conda-forge/noarch/pydicom-2.1.0-pyhd3deb0d_0.tar.bz2
  61 +https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-1_cp38.tar.bz2
  62 +https://repo.anaconda.com/pkgs/main/noarch/pytz-2020.1-py_0.conda
  63 +https://conda.anaconda.org/conda-forge/win-64/qt-5.12.9-hb2cf2c5_0.tar.bz2
  64 +https://repo.anaconda.com/pkgs/main/noarch/six-1.15.0-py_0.conda
  65 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.35.1-pyh9f0ad1d_0.tar.bz2
  66 +https://repo.anaconda.com/pkgs/main/win-64/wincertstore-0.2-py38_0.conda
  67 +https://conda.anaconda.org/conda-forge/win-64/certifi-2020.12.5-py38haa244fe_0.tar.bz2
  68 +https://conda.anaconda.org/conda-forge/win-64/curl-7.71.1-h4b64cdc_8.tar.bz2
  69 +https://conda.anaconda.org/conda-forge/win-64/hdf5-1.10.6-nompi_h89124ea_1110.tar.bz2
  70 +https://conda.anaconda.org/conda-forge/win-64/libffi-3.2.1-ha925a31_1007.tar.bz2
  71 +https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-he9e54da_1012.tar.bz2
  72 +https://conda.anaconda.org/conda-forge/win-64/libopencv-4.5.0-py38_2.tar.bz2
  73 +https://repo.anaconda.com/pkgs/main/win-64/mkl-service-2.3.0-py38hb782905_0.conda
  74 +https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.3.1-h57dd2e7_3.tar.bz2
  75 +https://conda.anaconda.org/conda-forge/win-64/postgresql-12.3-he14cc48_2.tar.bz2
  76 +https://conda.anaconda.org/conda-forge/win-64/proj-7.1.1-h7d85306_3.tar.bz2
  77 +https://repo.anaconda.com/pkgs/main/noarch/python-dateutil-2.8.1-py_0.conda
  78 +https://conda.anaconda.org/conda-forge/win-64/geotiff-1.6.0-h8884d1a_3.tar.bz2
  79 +https://conda.anaconda.org/conda-forge/win-64/gettext-0.19.8.1-hfbb10ce_1004.tar.bz2
  80 +https://conda.anaconda.org/conda-forge/win-64/kealib-1.4.13-h3b59ab9_1.tar.bz2
  81 +https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.7.4-nompi_h2ee746f_106.tar.bz2
  82 +https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.0.0-hf693123_0.tar.bz2
  83 +https://repo.anaconda.com/pkgs/main/win-64/setuptools-50.3.1-py38haa95532_1.conda
  84 +https://conda.anaconda.org/conda-forge/win-64/tiledb-2.1.2-hfabd47f_0.tar.bz2
  85 +https://conda.anaconda.org/conda-forge/win-64/libglib-2.66.2-h35efcdc_0.tar.bz2
  86 +https://conda.anaconda.org/conda-forge/noarch/pip-20.2.4-py_0.tar.bz2
  87 +https://conda.anaconda.org/conda-forge/win-64/cairo-1.16.0-hd28d34b_1006.tar.bz2
  88 +https://conda.anaconda.org/conda-forge/win-64/glib-2.66.2-h0e60522_0.tar.bz2
  89 +https://conda.anaconda.org/conda-forge/win-64/poppler-0.89.0-h5d62644_1.tar.bz2
  90 +https://conda.anaconda.org/conda-forge/win-64/libgdal-3.1.4-h0e5aa5a_0.tar.bz2
  91 +https://conda.anaconda.org/conda-forge/win-64/gdal-3.1.4-py38h8f7194f_0.tar.bz2
  92 +https://conda.anaconda.org/conda-forge/win-64/opencv-4.5.0-py38_2.tar.bz2
  93 +https://conda.anaconda.org/conda-forge/win-64/pdal-2.2.0-hcb5b5b6_1.tar.bz2
  94 +https://conda.anaconda.org/conda-forge/win-64/py-opencv-4.5.0-py38h1cdfbd6_2.tar.bz2
  95 +https://conda.anaconda.org/conda-forge/win-64/python-pdal-2.3.5-py38h6a11f7f_0.tar.bz2
  96 +https://repo.anaconda.com/pkgs/main/win-64/mkl_fft-1.2.0-py38h45dec08_0.conda
  97 +https://repo.anaconda.com/pkgs/main/win-64/mkl_random-1.1.1-py38h47e9c7a_0.conda
  98 +https://repo.anaconda.com/pkgs/main/win-64/numpy-1.19.2-py38hadc3359_0.conda
  99 +https://repo.anaconda.com/pkgs/main/win-64/pandas-1.1.3-py38ha925a31_0.conda