Skip to content

Commit

Permalink
stockholm
Browse files Browse the repository at this point in the history
  • Loading branch information
Panos Achlioptas committed Jul 25, 2018
1 parent eea79fe commit 968d7c5
Show file tree
Hide file tree
Showing 3 changed files with 302 additions and 113 deletions.
226 changes: 121 additions & 105 deletions notebooks/train_latent_gan.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 2,
"metadata": {
"collapsed": false,
"deletable": true,
Expand All @@ -28,17 +28,8 @@
"from latent_3d_points.src.in_out import snc_category_to_synth_id, create_dir, PointCloudDataSet, \\\n",
" load_all_point_clouds_under_folder\n",
"\n",
"from latent_3d_points.src.tf_utils import reset_tf_graph"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from latent_3d_points.src.tf_utils import reset_tf_graph\n",
"\n",
"from latent_3d_points.src.vanilla_gan import Vanilla_GAN\n",
"from latent_3d_points.src.w_gan_gp import W_GAN_GP\n",
"\n",
Expand All @@ -48,7 +39,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 3,
"metadata": {
"collapsed": false,
"deletable": true,
Expand All @@ -63,7 +54,7 @@
},
{
"cell_type": "code",
"execution_count": 23,
"execution_count": 4,
"metadata": {
"collapsed": false
},
Expand All @@ -77,38 +68,23 @@
}
],
"source": [
"# Setup data/AE parameters\n",
"top_out_dir = '../data/' # Use to save Neural-Net check-points etc.\n",
"top_in_dir = '../data/shape_net_core_uniform_samples_2048/' # Top-dir of where point-clouds are stored.\n",
"# Set DATA/AE parameters\n",
"\n",
"top_out_dir = '../data/' # Use to save Neural-Net check-points etc.\n",
"top_in_dir = '../data/shape_net_core_uniform_samples_2048/' # Top-dir of where point-clouds are stored.\n",
"\n",
"ae_configuration = '../data/single_class_ae/configuration' # AE model-description. You can alternatively, \n",
" # use your own way to load a pre-trained AE.\n",
"\n",
"ae_epoch = 100 # Model/epoch of AE to load.\n",
"bneck_size = 128 # Bottleneck-AE size\n",
"\n",
"experiment_name = 'latent_gan'\n",
"n_pc_points = 2048 # Number of points per model.\n",
"bneck_size = 128 # Bottleneck-AE size\n",
"ae_loss = 'emd' # Loss to optimize: 'emd' or 'chamfer'\n",
"n_pc_points = 2048 # Number of points per model.\n",
"\n",
"class_name = raw_input('Give me the class name (e.g. \"chair\"): ').lower()"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"6778 pclouds were loaded. They belong in 1 shape-classes.\n"
]
}
],
"source": [
"syn_id = snc_category_to_synth_id()[class_name]\n",
"class_dir = osp.join(top_in_dir , syn_id)\n",
"all_pc_data = load_all_point_clouds_under_folder(class_dir, n_threads=8, file_ending='.ply', verbose=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -117,21 +93,7 @@
},
"outputs": [],
"source": [
"synthetic_data_out_dir = osp.join(top_out_dir, 'OUT/synthetic_samples/', experiment_tag)\n",
"create_dir(synthetic_data_out_dir)\n",
"train_dir = osp.join(top_data_dir, 'OUT/iclr/nn_models', experiment_tag)\n",
"create_dir(train_dir)"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"# Set-up configuraton parameters.\n",
"# Set GAN, training parameters.\n",
"\n",
"# save_model = False\n",
"# class_name = raw_input('Give me the class type.\\n').lower()\n",
Expand All @@ -147,17 +109,21 @@
"init_lr = 0.0001\n",
"batch_size = 50\n",
"noise_params = {'mu':0, 'sigma': 0.2}\n",
"noise_dim = b_neck\n",
"noise_dim = bneck_size\n",
"beta = 0.5\n",
"n_syn_samples = train_data.num_examples # How many samples to produce in each save step.\n",
"n_out = [bneck_size]\n",
"accum_syn_data = []\n",
"train_stats = []"
"train_stats = []\n",
"\n",
"if save_synthetic_samples:\n",
" synthetic_data_out_dir = osp.join(top_out_dir, 'OUT/synthetic_samples/', experiment_tag)\n",
" create_dir(synthetic_data_out_dir)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": 5,
"metadata": {
"collapsed": false
},
Expand All @@ -166,85 +132,95 @@
"name": "stdout",
"output_type": "stream",
"text": [
"679 pclouds were loaded. They belong in 1 shape-classes.\n",
"5761 pclouds were loaded. They belong in 1 shape-classes.\n",
"338 pclouds were loaded. They belong in 1 shape-classes.\n"
"6778 pclouds were loaded. They belong in 1 shape-classes.\n"
]
}
],
"source": [
"top_data_dir = '/orions4-zfs/projects/optas/DATA/'\n",
"in_data = load_multiple_version_of_pcs('uniform_one', syn_id, n_classes=1)\n",
"train_data = in_data['train']"
"# Load point-clouds\n",
"syn_id = snc_category_to_synth_id()[class_name]\n",
"class_dir = osp.join(top_in_dir , syn_id)\n",
"all_pc_data = load_all_point_clouds_under_folder(class_dir, n_threads=8, file_ending='.ply', verbose=True)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"'/orions4-zfs/projects/optas/DATA/OUT/iclr/nn_models/l_gan_chair_mlp_with_split_1pc_usampled_bnorm_on_encoder_only_emd_bneck_128_denser_sampling'"
]
},
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
],
"source": []
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": false,
"deletable": true,
"editable": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Model restored in epoch 400.\n"
"Building Encoder\n",
"encoder_conv_layer_0 conv params = 256 bnorm params = 128\n",
"Tensor(\"single_class_ae_2/Relu:0\", shape=(?, 2048, 64), dtype=float32)\n",
"output size: 131072 \n",
"\n",
"encoder_conv_layer_1 conv params = 8320 bnorm params = 256\n",
"Tensor(\"single_class_ae_2/Relu_1:0\", shape=(?, 2048, 128), dtype=float32)\n",
"output size: 262144 \n",
"\n",
"encoder_conv_layer_2 conv params = 16512 bnorm params = 256\n",
"Tensor(\"single_class_ae_2/Relu_2:0\", shape=(?, 2048, 128), dtype=float32)\n",
"output size: 262144 \n",
"\n",
"encoder_conv_layer_3 conv params = 33024 bnorm params = 512\n",
"Tensor(\"single_class_ae_2/Relu_3:0\", shape=(?, 2048, 256), dtype=float32)\n",
"output size: 524288 \n",
"\n",
"encoder_conv_layer_4 conv params = 32896 bnorm params = 256\n",
"Tensor(\"single_class_ae_2/Relu_4:0\", shape=(?, 2048, 128), dtype=float32)\n",
"output size: 262144 \n",
"\n",
"Tensor(\"single_class_ae_2/Max:0\", shape=(?, 128), dtype=float32)\n",
"Building Decoder\n",
"decoder_fc_0 FC params = 33024 Tensor(\"single_class_ae_2/Relu_5:0\", shape=(?, 256), dtype=float32)\n",
"output size: 256 \n",
"\n",
"decoder_fc_1 FC params = 65792 Tensor(\"single_class_ae_2/Relu_6:0\", shape=(?, 256), dtype=float32)\n",
"output size: 256 \n",
"\n",
"decoder_fc_2 FC params = 1579008 Tensor(\"single_class_ae_2/decoder_fc_2/BiasAdd:0\", shape=(?, 6144), dtype=float32)\n",
"output size: 6144 \n",
"\n",
"INFO:tensorflow:Restoring parameters from ../data/single_class_ae/models.ckpt-100\n",
"Model restored in epoch 100.\n"
]
}
],
"source": [
"# Load Auto-Encoder\n",
"ae_id = '_'.join(['ae', class_name, ae_experiment_tag, str(n_pc_points), 'pts', str(b_neck), 'bneck', ae_loss])\n",
"ae_train_dir = osp.join(top_data_dir, 'OUT/iclr/nn_models/', ae_id)\n",
"ae_conf = Conf.load(osp.join(ae_train_dir, 'configuration'))\n",
"saved_epochs = read_saved_epochs(ae_conf.train_dir)\n",
"_, best_epoch = find_best_validation_epoch_from_train_stats(osp.join(ae_train_dir, 'train_stats.txt'))\n",
"if best_epoch % ae_conf.saver_step != 0: # Model was not saved at that epoch.\n",
" best_epoch += best_epoch % ae_conf.saver_step\n",
"ae_conf.encoder_args['verbose'] = False\n",
"ae_conf.decoder_args['verbose'] = False\n",
"# Load pre-trained AE\n",
"reset_tf_graph()\n",
"ae = PointNetAutoEncoder(ae_conf.experiment_name, ae_conf) \n",
"ae.restore_model(ae_conf.train_dir, best_epoch, verbose=True)"
"ae_conf = Conf.load(ae_configuration)\n",
"ae = PointNetAutoEncoder(ae_conf.experiment_name, ae_conf)\n",
"ae.restore_model(ae_conf.train_dir, ae_epoch, verbose=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 19,
"metadata": {
"collapsed": true
"collapsed": false
},
"outputs": [],
"source": [
"# Convert raw-data to latent codes.\n",
"# latent_codes = get_latent_codes(ae, train_data.point_clouds)\n",
"latent_codes = np.random.randn(5000, 128)\n",
"latent_codes = ae.get_latent_codes(all_pc_data.point_clouds)\n",
"train_data = PointCloudDataSet(latent_codes)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 19,
Expand Down Expand Up @@ -832,6 +808,46 @@
" plt.xlabel('Epochs.') \n",
" plt.ylabel('Loss.')"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"* Εχεις κρατησει τα txts για τους περισσοτερους AEs?\n",
"* You can manually read them and write them to the appropriate functions and then also \n",
"add some reading/creation function from raw txt for the configuration.\n",
"\n",
"\n",
"\n",
"\n",
"\n",
"# # TEMP\n",
"# To load/work with pre-trained SN models.\n",
"# import sys\n",
"# sys.path.append(\"../../../../Git_Repos/\")\n",
"# from research.iclr.helper import load_multiple_version_of_pcs\n",
"# # in_data = load_multiple_version_of_pcs('uniform_one', syn_id, n_classes=1)\n",
"# # train_data = in_data['train']\n",
"# !cat /orions4-zfs/projects/optas/DATA/OUT/iclr/nn_models/ae_chair_mlp_with_split_1pc_usampled_bnorm_on_encoder_only_2048_pts_128_bneck_emd/configuration.txt\n",
"\n",
"# ae_configuration = '/orions4-zfs/projects/optas/DATA/OUT/iclr/nn_models/ae_chair_mlp_with_split_1pc_usampled_bnorm_on_encoder_only_2048_pts_128_bneck_emd/configuration.txt\n",
"\n",
"ae_conf = Conf.load(ae_configuration)\n",
"\n",
"# saved_epochs = read_saved_epochs(ae_conf.train_dir)\n",
"# _, best_epoch = find_best_validation_epoch_from_train_stats(osp.join(ae_train_dir, 'train_stats.txt'))\n",
"# if best_epoch % ae_conf.saver_step != 0: # Model was not saved at that epoch.\n",
"# best_epoch += best_epoch % ae_conf.saver_step\n",
"# ae_conf.encoder_args['verbose'] = False\n",
"# ae_conf.decoder_args['verbose'] = False\n",
"# reset_tf_graph()\n",
"# ae = PointNetAutoEncoder(ae_conf.experiment_name, ae_conf) \n",
"# ae.restore_model(ae_conf.train_dir, best_epoch, verbose=True)"
]
}
],
"metadata": {
Expand Down
Loading

0 comments on commit 968d7c5

Please sign in to comment.