Skip to content

Commit

Permalink
Fixed a bug when writing a datastream to HDFS for the case where the datapoint list has len == 1
Browse files Browse the repository at this point in the history
Fixed appending new data to an existing HDFS file
  • Loading branch information
nasirali1 committed Feb 22, 2018
1 parent b397c61 commit 8acb0cb
Showing 1 changed file with 7 additions and 1 deletion.
8 changes: 7 additions & 1 deletion cerebralcortex/core/data_manager/raw/stream_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -368,6 +368,12 @@ def write_hdfs_day_file(self, participant_id: uuid, stream_id: uuid, data: DataP
if len(data)==1:
filename = self.raw_files_dir+str(participant_id)+"/"+str(stream_id)+"/"+str(day)+".pickle"
try:
if hdfs.exists(filename):
with hdfs.open(filename, "rb") as curfile:
existing_data = curfile.read()
if existing_data is not None:
existing_data = deserialize_obj(existing_data)
chunked_data.extend(existing_data)
with hdfs.open(filename, "wb") as f:
pickle.dump(chunked_data, f)
except Exception as ex:
Expand All @@ -380,7 +386,7 @@ def write_hdfs_day_file(self, participant_id: uuid, stream_id: uuid, data: DataP
with hdfs.open(filename, "rb") as curfile:
existing_data = curfile.read()
if existing_data is not None:
existing_data = serialize_obj(existing_data)
existing_data = deserialize_obj(existing_data)
chunked_data.extend(existing_data)
#chunked_data = list(set(chunked_data)) # remove duplicate
try:
Expand Down

0 comments on commit 8acb0cb

Please sign in to comment.