#!/usr/bin/env python3
#
# Copyright 2018, Alexis Maldonado (amaldo@cs.uni-bremen.de).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#


import sys
import os
import csv

import yaml
from gtin import GTIN
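#Note: 'bpy' is normally only available in Blender's bundled Python interpreter,
#so this script is expected to be launched through Blender
#(e.g. `blender --background --python <this script>`).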
import bpy

from PIL import Image
import shutil


def scan_model_dirs(model_top_dir, data, dan_ean_map):
    '''Scan the current directory, which should contain one model per subdirectory,
    and add every model found to `data`, keyed by GTIN and model version.'''

    print("Scanning for models in directory = {}".format(os.getcwd()))
    model_dirs = os.listdir()
    
    found_dans = 0
    
    for directory in model_dirs:
        try:
            os.chdir(directory)
        except OSError:
            print("{} is not a directory, skipping".format(directory))
            continue
        
        files = os.listdir()
        
        
        dan = ''
        dae_filename = ''
        png_filename = ''
        fbx_filename = ''        
        found_bb_file = False
        model_version = ''
        
        for filename in files:
            
            #search for the bounding-box centered collada
            
            if ( (filename.find('.dae') > 0) and (filename.find("_bb") > 0) ):
                dae_filename = filename
                
                #while we are here, extract the DAN
                begin = filename.find("AN") + 2
                end = filename.find("_bb")
                dan = filename[begin:end]
                if len(dan) != 6:
                    print("Error: something wrong with file: {} in {}/{}".format(filename, model_top_dir, directory), file=sys.stderr)
                    break
                #print("Found DAN=" + str(dan))
                found_dans += 1
                
                #Now check for a version number. If it is not there, assume '00'
                ver_begin = filename.find("_v")
                if (ver_begin > 0):
                    ver_begin += len("_v")
                    ver_end = ver_begin + 2
                    model_version = filename[ver_begin:ver_end]
                    #print("Found a versioned file: {} vers {}".format(filename, model_version))
                else:
                    model_version = '00'

        for filename in files:
            #if it did not find the _bb.dae file, look for any collada
            if ( (dae_filename == '') and (filename.find(".dae") > 0) ):
                dae_filename = filename
                
                #while we are here, extract the DAN
                begin = filename.find("AN") + 2
                end = filename.find(".dae")
                dan = filename[begin:end]
                if len(dan) != 6:
                    print("Error: something wrong with file: {} in {}/{}".format(filename, model_top_dir, directory), file=sys.stderr)
                    break
                #print("Found DAN=" + str(dan))
                found_dans += 1
                
                #Now check for a version number. If it is not there, assume '00'
                ver_begin = filename.find("_v")
                if (ver_begin > 0):
                    ver_end = ver_begin + 2
                    model_version = filename[ver_begin:ver_end]
                else:
                    model_version = '00'                
                


            if( filename.find(".png") > 0):
                png_filename = filename

            if( filename.find(".fbx") > 0 ):
                fbx_filename = filename
 
        
        #Check if any fields are missing
        if ( (dan == '') or (dae_filename == '') or (png_filename == '') or (fbx_filename == '') ):
            print("Did not find one or more files in directory: {}".format(directory))
        else:
            #All the fields are there
            #Ready to add a new register to the data
            
            #The database is keyed by GTIN, so convert the raw DAN first, then check for duplicates
            short_dan = dan
            long_dan = str(GTIN(raw=short_dan))
            art_gtin = str(dan_ean_map[long_dan])

            if (art_gtin in data) and (model_version in data[art_gtin]):
                print("DAN {} version {} is already in the database. directory1= {} directory2= {}/{}".format(dan, model_version, data[art_gtin][model_version]['model_path'], model_top_dir, directory),
                      file=sys.stderr)
            else:
                print('adding MODEL with DAN= {}'.format(dan))
                #Actually add the register
                model_path = "{}/{}".format(model_top_dir, directory)

                article_data = {}
                article_data['model_path'] = model_path
                article_data['filename_dae'] = dae_filename
                article_data['filename_png'] = png_filename
                article_data['filename_fbx'] = fbx_filename

                article_data['gtin'] = art_gtin
                article_data['dan'] = long_dan
                article_data['dan_raw'] = short_dan
                
                print('Directory: {}'.format(os.getcwd()))
                model_dim = bl_get_dimensions(dae_filename)
                
                article_data['bounding_box_delta_x'] = model_dim[0]
                article_data['bounding_box_delta_y'] = model_dim[1]
                article_data['bounding_box_delta_z'] = model_dim[2]
                                
                
                
                #If there is another record, load it to add to that dict
                version_record = {}
                if (art_gtin in data):
                    version_record = data[art_gtin]
                
                version_record[model_version] = article_data
                    
                #add or replace the record under the version number to the big data structure
                data[str(art_gtin)] = version_record
 

        os.chdir("..")
            
                
    print("Out of {} directories in {}, found {} DANs".format(len(model_dirs), model_top_dir,  found_dans))
    

def get_dan_ean_map(csv_filename):
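    '''Read the DAN -> EAN mapping from a CSV file and return it as a dict.

    Assumed CSV layout (based on how the columns are used below): column 0 is a
    numeric row index, column 1 is the DAN (possibly containing a space), and
    column 2 is the EAN. Rows that do not match this layout are skipped.
    '''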
    if csv_filename is None:
        csv_filename = 'dan_ean_mapping.csv'
    dan_ean = {}
    
    with open(csv_filename) as csvfile:
        ean_dan_reader = csv.reader(csvfile)
        for row in ean_dan_reader:
            print(row)
            try:
                if row[0].isdigit():
                    dan_file = row[1]
                    print('dan_file: {}'.format(dan_file))

                    #the DAN column sometimes contains a space; join the first two parts
                    dan_s = dan_file.split(' ')
                    if (len(dan_s) == 1):
                        dan = str(dan_s[0])
                    else:
                        dan = str(dan_s[0]) + str(dan_s[1])

                    print('dan: {}'.format(dan))
                    dan_ean[dan] = row[2]
                else:
                    print('Not digit. Throwing away one row: {}'.format(row))
            except IndexError:
                print('Malformed row (too few columns). Throwing away one row: {}'.format(row))
    return(dan_ean)
        

def clean_dans(dan_ean_map):
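    '''Return a copy of dan_ean_map with both keys and values converted to GTIN objects.

    Entries that cannot be parsed as GTINs are reported and dropped.
    '''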
    
    clean_map = {}
    
    for dan in dan_ean_map:
        try:
            clean_map[GTIN(dan)] = GTIN(dan_ean_map[dan])
        except Exception:
            print("Detected error in DAN/EAN: {}/{}".format(dan, dan_ean_map[dan]))
            
    return(clean_map)

def test_dan_ean_map():
    
    dan_ean_map = get_dan_ean_map('/home/amaldo/work/refills/rename_models/dan_ean_mapping.csv')
    
    print(dan_ean_map)
    
def bl_get_dimensions(collada_filename):
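    '''Import a Collada file into an empty Blender scene and return its bounding-box
    dimensions [dx, dy, dz] in scene units, taken from the first mesh object found.
    '''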
    #empty the scene
    bl_clear_scene()
    
    #Load the model
    bpy.ops.wm.collada_import(filepath=collada_filename, import_units=False, fix_orientation=False)  #FIXME: Check these options
    
    #Select everything that was just imported
    bpy.ops.object.select_all(action='SELECT')

    #Some of the objects don't have the rotation applied, and the dimensions end up
    #in the wrong axes, so apply rotation (and scale, just in case) before measuring
    bpy.ops.object.transform_apply(rotation=True, scale=True)

    #Keep only the mesh objects selected
    bpy.ops.object.select_all(action='DESELECT')
    bpy.ops.object.select_by_type(type='MESH')
    
    if (len(bpy.context.selected_objects) > 1):
        print("[WARNING] More than one mesh in the file. Taking the first one.")

    d = bpy.context.selected_objects[0].dimensions
    
    return([d[0],d[1],d[2]])

def bl_clear_scene():
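    '''Select and delete every object in the current Blender scene.'''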
    bpy.ops.object.select_all(action='SELECT')
    bpy.ops.object.delete(use_global=False)

    

def main():
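    '''Scan the model repository, build the model database, generate low-resolution
    copies of the meshes and textures, and save everything to model_data.yaml.
    '''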
    #if len(sys.argv) < 2:	
    #    print('Usage: %s <directory>' %(sys.argv[0]))
    #    sys.exit()
    
    #base_dir = sys.argv[1]
    #os.chdir(base_dir)

    #Let us assume that we are being run in the scripts directory
    os.chdir('..')
    repo_base_dir = os.getcwd()
    print('Expecting the model repository here: {}'.format(repo_base_dir))


    models_directory = repo_base_dir + '/models/'
    os.chdir(models_directory)
    
    
    print("Models working in directory: {}".format(os.getcwd()))
    
    dan_ean_map = get_dan_ean_map(repo_base_dir + '/data/dan_ean_mapping.csv')
    
    
    #dict to hold all the data
    data = {}
    
    
    model_top_dirs = ['naive_box_meshes','andrei_student', 'alexis_student']
    #model_top_dirs = ['naive_box_meshes']
    
    for model_top_dir in model_top_dirs:
        os.chdir(model_top_dir)
        scan_model_dirs(model_top_dir, data, dan_ean_map)
        os.chdir("..")
    
    print("len(data)={}".format(len(data)))
    print("Current dir BEFORE={}".format(os.getcwd()))
    
    #At this point we are back in the 'models' directory, which should contain the model_top_dirs listed above.
    
    #Now autogenerate some low-resolution models
    for gtin in data:
        for ver in data[gtin]:
            model = data[gtin][ver]
            
            model_path = model['model_path']
            filename_dae = model['filename_dae']
            filename_png = model['filename_png']
            
            print(model)
            
            autogen_path = '{}/autogen'.format(model_path)
            lowres_path = '{}/autogen/low-res'.format(model_path)
            
            
            #if autogen dir is not there, create it
            if (not os.path.exists(autogen_path)):
                os.mkdir(autogen_path)
            
            lowres_there = os.path.exists(lowres_path)
            
            if (lowres_there):
                #if lowres dir is there, delete it and start fresh
                shutil.rmtree(lowres_path)
                            
            os.mkdir(lowres_path)
            

            print("Current dir={}".format(os.getcwd()))
            shutil.copy('{}/{}'.format(model_path, filename_dae), '{}/autogen/low-res/{}'.format(model_path, filename_dae))
            source_img = Image.open('{}/{}'.format(model_path, filename_png), mode='r')
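            #Downscale the texture to 20% of its original resolution for the low-res model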
            resize_ratio = 0.2
            new_res = [ int(source_img.size[0] * resize_ratio), int(source_img.size[1] * resize_ratio)]
            small_image = source_img.resize( new_res, Image.LANCZOS)
            small_image.save('{}/autogen/low-res/{}'.format(model_path, filename_png), "PNG")
            
            #Add the fields to the map
            model['lowres_model_path'] = '{}/autogen/low-res'.format(model_path)
            model['lowres_filename_dae'] = filename_dae
            model['lowres_filename_png'] = filename_png
    
    
    
    #Now save the data description file
    yaml_save_filename = 'model_data.yaml'
    
    with open(yaml_save_filename, 'w') as yaml_file:
        print("Saving to file: {}".format(yaml_save_filename))
        #Use the fast C dumper when libyaml is available, otherwise fall back to the pure-Python one
        yaml.dump(data, yaml_file, Dumper=getattr(yaml, 'CDumper', yaml.Dumper), default_flow_style=False)
        
        
        
    print('Exiting')
     
if (__name__ == '__main__'):
    main()
    
