You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
~/Desktop/usecases/noise-to-signal/chaos_detection.py in create_non_tda_features(path, fourier_window_size, rolling_mean_size, rolling_max_size, rolling_min_size, mad_size, fourier_coefficients)
327 print('Need to specify the fourier coeffcients and the window size')
328 for n in fourier_coefficients:
--> 329 df[f'fourier_w_{n}'] = df['x'].rolling(fourier_window_size).parallel_apply(lambda x: rfft(x)[n],
330 raw=False)
331 # Remove all rows with NaNs
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/context.py in Pool(self, processes, initializer, initargs, maxtasksperchild)
117 '''Returns a process pool object'''
118 from .pool import Pool
--> 119 return Pool(processes, initializer, initargs, maxtasksperchild,
120 context=self.get_context())
121
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/pool.py in __init__(self, processes, initializer, initargs, maxtasksperchild, context)
210 self._processes = processes
211 try:
--> 212 self._repopulate_pool()
213 except Exception:
214 for p in self._pool:
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/process.py in start(self)
119 'daemonic processes are not allowed to have children'
120 _cleanup()
--> 121 self._popen = self._Popen(self)
122 self._sentinel = self._popen.sentinel
123 # Avoid a refcycle if the target function holds an indirect
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/context.py in _Popen(process_obj)
281 def _Popen(process_obj):
282 from .popen_spawn_posix import Popen
--> 283 return Popen(process_obj)
284
285 class ForkServerProcess(process.BaseProcess):
Hello.
In create_all_features from chaos_detection.py, Pandarallel does not seem to work with Python 3.8.
I get the following error.
~/Desktop/usecases/noise-to-signal/chaos_detection.py in create_all_features(path, noise_level, return_betti_surface)
355 """
356
--> 357 df = create_non_tda_features(path=path,
358 rolling_max_size=[10, 20, 50],
359 rolling_min_size=[10, 20, 50],
~/Desktop/usecases/noise-to-signal/chaos_detection.py in create_non_tda_features(path, fourier_window_size, rolling_mean_size, rolling_max_size, rolling_min_size, mad_size, fourier_coefficients)
327 print('Need to specify the fourier coeffcients and the window size')
328 for n in fourier_coefficients:
--> 329 df[f'fourier_w_{n}'] = df['x'].rolling(fourier_window_size).parallel_apply(lambda x: rfft(x)[n],
330 raw=False)
331 # Remove all rows with NaNs
~/anaconda3/envs/usecases-gtda/lib/python3.8/site-packages/pandarallel/pandarallel.py in closure(data, func, *args, **kwargs)
433 )
434 try:
--> 435 pool = Pool(
436 nb_workers, worker_init, (prepare_worker(use_memory_fs)(worker),)
437 )
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/context.py in Pool(self, processes, initializer, initargs, maxtasksperchild)
117 '''Returns a process pool object'''
118 from .pool import Pool
--> 119 return Pool(processes, initializer, initargs, maxtasksperchild,
120 context=self.get_context())
121
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/pool.py in __init__(self, processes, initializer, initargs, maxtasksperchild, context)
210 self._processes = processes
211 try:
--> 212 self._repopulate_pool()
213 except Exception:
214 for p in self._pool:
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/pool.py in _repopulate_pool(self)
301
302 def _repopulate_pool(self):
--> 303 return self._repopulate_pool_static(self._ctx, self.Process,
304 self._processes,
305 self._pool, self._inqueue,
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/pool.py in _repopulate_pool_static(ctx, Process, processes, pool, inqueue, outqueue, initializer, initargs, maxtasksperchild, wrap_exception)
324 w.name = w.name.replace('Process', 'PoolWorker')
325 w.daemon = True
--> 326 w.start()
327 pool.append(w)
328 util.debug('added worker')
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/process.py in start(self)
119 'daemonic processes are not allowed to have children'
120 _cleanup()
--> 121 self._popen = self._Popen(self)
122 self._sentinel = self._popen.sentinel
123 # Avoid a refcycle if the target function holds an indirect
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/context.py in _Popen(process_obj)
281 def _Popen(process_obj):
282 from .popen_spawn_posix import Popen
--> 283 return Popen(process_obj)
284
285 class ForkServerProcess(process.BaseProcess):
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/popen_spawn_posix.py in __init__(self, process_obj)
30 def __init__(self, process_obj):
31 self._fds = []
---> 32 super().__init__(process_obj)
33
34 def duplicate_for_child(self, fd):
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/popen_fork.py in __init__(self, process_obj)
17 self.returncode = None
18 self.finalizer = None
---> 19 self._launch(process_obj)
20
21 def duplicate_for_child(self, fd):
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/popen_spawn_posix.py in _launch(self, process_obj)
45 try:
46 reduction.dump(prep_data, fp)
---> 47 reduction.dump(process_obj, fp)
48 finally:
49 set_spawning_popen(None)
~/anaconda3/envs/usecases-gtda/lib/python3.8/multiprocessing/reduction.py in dump(obj, file, protocol)
58 def dump(obj, file, protocol=None):
59 '''Replacement for pickle.dump() using ForkingPickler.'''
---> 60 ForkingPickler(file, protocol).dump(obj)
61
62 #
AttributeError: Can't pickle local object 'prepare_worker.<locals>.closure.<locals>.wrapper'
It otherwise works correctly with Python 3.7.6.
The text was updated successfully, but these errors were encountered: