From f318fe11126af97538041e9f64da91584a7178e2 Mon Sep 17 00:00:00 2001 From: Aleksandar Bojchevski Date: Thu, 23 Nov 2023 13:14:56 +0000 Subject: [PATCH] update papers and news --- _bibliography/papers.bib | 35 ++++++++++++++++++++++++++++++++++- _news/cds.md | 7 +++++++ _news/kpa.md | 8 ++++++++ _news/papers_icml23.md | 6 ++++++ _news/papers_neurips23.md | 6 ++++++ 5 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 _news/cds.md create mode 100644 _news/kpa.md create mode 100644 _news/papers_icml23.md create mode 100644 _news/papers_neurips23.md diff --git a/_bibliography/papers.bib b/_bibliography/papers.bib index ee19895de516..ce07d4be2ea0 100644 --- a/_bibliography/papers.bib +++ b/_bibliography/papers.bib @@ -1,5 +1,38 @@ --- --- +@inproceedings{scholten23hierarchical, + author = {Scholten, Yan and Schuchardt, Jan and Bojchevski, Aleksandar and G{\"u}nnemann, Stephan}, + booktitle = {Neural Information Processing Systems, {NeurIPS}}, + title = {Hierarchical Randomized Smoothing}, + year = {2023}, + category = {conference}, + abbr = {NeurIPS}, + abstract = {Real-world data is complex and often consists of objects that can be decomposed into multiple entities (e.g. images into pixels, graphs into interconnected nodes). Randomized smoothing is a powerful framework for making models provably robust against small changes to their inputs - by guaranteeing robustness of the majority vote when randomly adding noise before classification. Yet, certifying robustness on such complex data via randomized smoothing is challenging when adversaries do not arbitrarily perturb entire objects (e.g. images) but only a subset of their entities (e.g. pixels). As a solution, we introduce hierarchical randomized smoothing: We partially smooth objects by adding random noise only on a randomly selected subset of their entities. By adding noise in a more targeted manner than existing methods we obtain stronger robustness guarantees while maintaining high accuracy. 
We initialize hierarchical smoothing using different noising distributions, yielding novel robustness certificates for discrete and continuous domains. We experimentally demonstrate the importance of hierarchical smoothing in image and node classification, where it yields superior robustness-accuracy trade-offs. Overall, hierarchical smoothing is an important contribution towards models that are both - certifiably robust to perturbations and accurate.}, + arxiv = {2310.16221}, +} +@inproceedings{mustafa23aregats, + author = {Mustafa, Nimrah and Bojchevski, Aleksandar and Burkholz, Rebekka}, + booktitle = {Neural Information Processing Systems, {NeurIPS}}, + title = {Are GATs Out of Balance?}, + year = {2023}, + category = {conference}, + abbr = {NeurIPS}, + abstract = {While the expressive power and computational capabilities of graph neural networks (GNNs) have been theoretically studied, their optimization and learning dynamics, in general, remain largely unexplored. Our study undertakes the Graph Attention Network (GAT), a popular GNN architecture in which a node's neighborhood aggregation is weighted by parameterized attention coefficients. We derive a conservation law of GAT gradient flow dynamics, which explains why a high portion of parameters in GATs with standard initialization struggle to change during training. This effect is amplified in deeper GATs, which perform significantly worse than their shallow counterparts. To alleviate this problem, we devise an initialization scheme that balances the GAT network. Our approach i) allows more effective propagation of gradients and in turn enables trainability of deeper networks, and ii) attains a considerable speedup in training and convergence time in comparison to the standard initialization. Our main theorem serves as a stepping stone to studying the learning dynamics of positive homogeneous models with attention mechanisms.}, + arxiv = {2310.07235}, +} +@inproceedings{zargarbashi23conformal, + author = {H. 
Zargarbashi, Soroush and Antonelli, Simone and Bojchevski, Aleksandar}, + booktitle = {International Conference on Machine Learning, {ICML}}, + title = {Conformal Prediction Sets for Graph Neural Networks}, + year = {2023}, + category = {conference}, + abbr = {ICML}, + abstract = {Despite the widespread use of graph neural networks (GNNs) we lack methods to reliably quantify their uncertainty. We propose a conformal procedure to equip GNNs with prediction sets that come with distribution-free guarantees – the output set contains the true label with arbitrarily high probability. Our post-processing procedure can wrap around any (pretrained) GNN, and unlike existing methods, results in meaningful sets even when the model provides only the top class. The key idea is to diffuse the node-wise conformity scores to incorporate neighborhood information. By leveraging the network homophily we construct sets with comparable or better efficiency (average size) and significantly improved singleton hit ratio (correct sets of size one). 
In addition to an extensive empirical evaluation, we investigate the theoretical conditions under which smoothing provably improves efficiency.}, + code = {https://github.com/soroushzargar/DAPS}, + html = {https://proceedings.mlr.press/v202/h-zargarbashi23a.html}, + pdf = {https://proceedings.mlr.press/v202/h-zargarbashi23a/h-zargarbashi23a.pdf}, + selected = {true}, +} @inproceedings{akhondzadeh23probing, author = {Mohammad Akhondzadeh, Sadegh and Lingam, Vijay and Bojchevski, Aleksandar}, booktitle = {International Conference on Artificial Intelligence and Statistics, {AISTATS}}, @@ -192,7 +225,7 @@ @inproceedings{bojchevski2019adversarial arxiv = {1809.01093}, code = {https://github.com/abojchevski/node_embedding_attack}, award = {Oral}, - selected = {true}, + selected = {false}, slides = {slides_embedding_attack.pdf}, poster = {poster_embedding_attack.pdf}, talk = {https://www.videoken.com/embed/1zMVZKlxfU4?tocitem=1}, diff --git a/_news/cds.md b/_news/cds.md new file mode 100644 index 000000000000..f1f306fa9c47 --- /dev/null +++ b/_news/cds.md @@ -0,0 +1,7 @@ +--- +layout: post +date: 2023-09-01 +inline: true +--- + +I joined the [Center for Data and Simulation Science](https://cds.uni-koeln.de/en/) as a core scientist. \ No newline at end of file diff --git a/_news/kpa.md b/_news/kpa.md new file mode 100644 index 000000000000..01c3fbd25d41 --- /dev/null +++ b/_news/kpa.md @@ -0,0 +1,8 @@ +--- +layout: post +date: 2023-04-21 +inline: true +--- + +Our group joined the key profile area ["Intelligent Methods for Earth System Sciences"](https://imfess.uni-koeln.de/en/). 
+ diff --git a/_news/papers_icml23.md b/_news/papers_icml23.md new file mode 100644 index 000000000000..92297737d7d0 --- /dev/null +++ b/_news/papers_icml23.md @@ -0,0 +1,6 @@ +--- +layout: post +date: 2023-04-24 +inline: true +--- +One paper on [conformal prediction sets for GNNs](/publications#zargarbashi23conformal) was accepted at ICML 2023. \ No newline at end of file diff --git a/_news/papers_neurips23.md b/_news/papers_neurips23.md new file mode 100644 index 000000000000..96d2629e6ec4 --- /dev/null +++ b/_news/papers_neurips23.md @@ -0,0 +1,6 @@ +--- +layout: post +date: 2023-09-15 +inline: true +--- +Two papers, one on [certificates](/publications#scholten23hierarchical) and one on [GATs](/publications#mustafa23aregats), were accepted at NeurIPS 2023. \ No newline at end of file