Mirror of https://github.com/deepfakes/faceswap
faceswap/lib/utils.py
Clorr 34945cfcd7
Adding models as plugins + Face filtering (#53) + #39 + #43 + #44 + #49 (#61)
* Making Models as plugins

* Do not reload model on each image #39 + Adding FaceFilter #53

* Adding @lukaville PR for #43 and #44 (possibly)

* Training done in a separate thread

* Better log for plugin load

* Adding a prefetch to train.py #49
(Note that we prefetch 2 batches of images, due to the queue behavior)
+ More compact logging with verbose info included

* correction of DirectoryProcessor signature

* adding missing import

* Convert with parallel preprocessing of files

* Added coverage var for trainer

Added a var with comment. Feel free to add it as argument

* corrections

* Modifying preview and normalization of image + correction

* Cleanup
2018-01-31 18:56:44 +01:00


import argparse
import sys
from pathlib import Path
from scandir import scandir

def get_folder(path):
    output_dir = Path(path)
    output_dir.mkdir(parents=True, exist_ok=True)
    return output_dir

def get_image_paths(directory):
    return [x.path for x in scandir(directory)
            if x.name.endswith('.jpg')
            or x.name.endswith('.jpeg')
            or x.name.endswith('.png')]

class FullHelpArgumentParser(argparse.ArgumentParser):
    """
    Identical to the built-in argument parser, but on error
    it prints the full help message instead of just the usage information.
    """
    def error(self, message):
        self.print_help(sys.stderr)
        args = {'prog': self.prog, 'message': message}
        self.exit(2, '%(prog)s: error: %(message)s\n' % args)

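# Usage sketch (illustrative addition, not part of the original module). The
# option names below are hypothetical; the point is that an invalid command
# line makes error() print the complete help text to stderr and exit with
# status 2, instead of argparse's terse one-line usage message.
def _full_help_parser_demo():
    parser = FullHelpArgumentParser(prog='faceswap')
    parser.add_argument('-i', '--input-dir', required=True, help='folder of input images')
    parser.add_argument('-o', '--output-dir', required=True, help='folder for results')
    # Missing --output-dir triggers error() -> full help on stderr, then exit(2)
    return parser.parse_args(['-i', 'photos/faces'])
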
# From: https://stackoverflow.com/questions/7323664/python-generator-pre-fetch
import threading
import queue as Queue


class BackgroundGenerator(threading.Thread):
    def __init__(self, generator, prefetch=1):  # See below why prefetch count is flawed
        threading.Thread.__init__(self)
        self.queue = Queue.Queue(prefetch)
        self.generator = generator
        self.daemon = True
        self.start()

    def run(self):
        # Put until queue size is reached.
        # Note: put blocks only if put is called while queue has already reached max size
        # => this makes 2 prefetched items! One in the queue, one waiting for insertion!
        for item in self.generator:
            self.queue.put(item)
        self.queue.put(None)

    def iterator(self):
        while True:
            next_item = self.queue.get()
            if next_item is None:
                break
            yield next_item
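
# Usage sketch (illustrative addition, not part of the original module). Wrap a
# slow generator in BackgroundGenerator so the next item is prepared on a
# background thread while the consumer works on the current one. With the
# default prefetch=1 there can be two items ready ahead of the consumer: one
# sitting in the queue and one blocked inside put(), which is the behaviour the
# comment in run() describes.
def _background_generator_demo():
    import time

    def slow_batches():
        for index in range(5):
            time.sleep(0.5)               # simulate expensive batch preparation
            yield 'batch-%d' % index

    for batch in BackgroundGenerator(slow_batches()).iterator():
        time.sleep(0.5)                   # simulate training on the batch
        print('consumed %s' % batch)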