# get_ncorners.py (from repository R8800 solar_potential)

import numpy as np
import pandas as pd
import geopandas as gpd
from shapely.geometry import Polygon
import fiona
import geo_ops
import os
import glob
import sys
import time

################# USER INPUTS ################
# file information
ROOFS_FILE = sys.argv[1]  # NOTE: must be a shapefile
OUTFILE = sys.argv[2]

# Current batch information
BATCH_SIZE = int(sys.argv[3])
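
# Example invocation (the file names here are placeholders; only the argument
# order is taken from the sys.argv reads above):
#   python get_ncorners.py roofs.shp corner_counts.csv 10000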

################ CONSTANTS ###################
# field names in shapefile
AREA_FIELD = 'FLAECHE'
TILT_FIELD = 'NEIGUNG'
ORIENTATION_FIELD = 'AUSRICHTUN'

# N_ITER = int(np.ceil( float(FILESIZE) / float(BATCH_SIZE) ))
TOLERANCE = 0.1
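# TOLERANCE is the simplification tolerance handed to GeoDataFrame.simplify()
# in load_from_file(); it is expressed in the units of the shapefile's CRS
# (presumably metres for this dataset).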
#============================================#
#============================================#
############### Main function ###############
def main():
    print_inputs()
    timer = time.time()
    corner_count = []

    # open file with fiona and get file length / number of iterations
    file_handle = fiona.open(ROOFS_FILE)
    N_ITER = int(np.ceil(len(file_handle) / float(BATCH_SIZE)))
    print('Length of file: %d' % len(file_handle))

    # load data one batch at a time
    for BATCH_ID in range(N_ITER):
        print('Starting iteration %d after %.3f seconds' % (BATCH_ID + 1, time.time() - timer))

        # load data and perform pre-processing steps (clean & rotate)
        roofs = load_from_file(file_handle, batch_id=BATCH_ID,
                               batch_size=BATCH_SIZE, tolerance=TOLERANCE)

        # count the number of corners of each roof shape
        corner_count.append(get_n_corners(roofs))

    corners_all = pd.concat(corner_count)
    corners_all.to_csv(OUTFILE)
    print('Saved file with n_corner information to %s' % OUTFILE)

    file_handle.close()

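# The CSV written to OUTFILE has one row per DF_UID with the summed corner
# count over all exploded parts of that roof; the values below are purely
# illustrative:
#   DF_UID,n_corners
#   1001,4
#   1002,7
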
################ SUBROUTINES ################
def load_from_file(file, batch_id, batch_size, tolerance):
    # load a given batch of lines from file and pre-process the data
    # returns geopandas dataframe with polygons and database information

    # load selected rows from file
    start = batch_id * batch_size
    end = (batch_id + 1) * batch_size
    roofs = gpd.GeoDataFrame.from_features(file[start:end])

    if len(roofs) == 0:
        raise ValueError('Rooftop dataframe is empty - no data loaded from file.')

    # PRE-PROCESSING: simplify roof shapes
    roofs['geometry'] = roofs.simplify(tolerance=tolerance)

    return roofs

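# Note on load_from_file: the slice file[start:end] relies on fiona Collection
# indexing, so each iteration of the batch loop in main() only pulls the
# features of the current batch rather than the whole shapefile at once.
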
def get_n_corners(roofs):
    # compute the number of corners for each geometry in roofs
    # output: dataframe with fields "DF_UID" and "n_corners", can then be merged with rooftops

    # step 1: explode roof shapes into single-part polygons
    unique_gdf = roofs.explode().reset_index().drop(['level_0', 'level_1'], axis=1)

    # step 2: attach number of corners to the unique dataframe
    # (take len - 1, as the closing point of the exterior ring is always counted twice)
    try:
        unique_gdf['n_corners'] = unique_gdf.geometry.exterior.apply(lambda x: len(x.coords) - 1)
    except Exception:
        print('Adding n_corners failed, likely because of multipart polygons')

    # step 3: drop unnecessary data
    corner_df = unique_gdf.loc[:, ['DF_UID', 'n_corners']]

    # step 4: group by DF_UID and get the total count (sum over all roof pieces)
    return corner_df.groupby('DF_UID').sum()

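# Minimal illustration of the counting logic above; _corner_count_example is
# not part of the original pipeline and is never called, it only documents why
# the "- 1" is needed.
def _corner_count_example():
    square = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
    # exterior.coords closes the ring by repeating the first point, so the
    # square has 5 coordinates but only 4 corners
    return len(square.exterior.coords) - 1
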
############ AUXILIARY FUNCTIONS ##############
def print_inputs():
    print('\nINPUT DATA:')
    print("Input file: %s" % ROOFS_FILE)
    print("Output file: %s" % OUTFILE)
    print("Batch size: %d" % BATCH_SIZE)
    print("")

##############################################
if __name__ == '__main__':
    main()