# Conferences.bib

@inproceedings{Landrin-SchweitzerPPSN2000,
  author    = {Landrin-Schweitzer, Y. and Lutton, E.},
  title     = {Perturbation theory for Evolutionary Algorithms: towards an estimation of convergence speed},
  booktitle = {Parallel Problem Solving from Nature -- PPSN VI, 6th International Conference},
  editor    = {Schoenauer, M. and Deb, K. and Rudolph, G. and Yao, X. and Lutton, E. and Merelo, J. J. and Schwefel, H.-P.},
  series    = {Lecture Notes in Computer Science},
  volume    = {1917},
  publisher = {Springer Verlag},
  year      = {2000},
  month     = sep,
  note      = {September 16--20},
  abstract  = {When considering continuous spaces EA, a convenient tool to model these algorithms is perturbation theory. In this paper we present preliminary results, derived from Freidlin-Wentzell theory, related to the convergence of a simple EA model. The main result of this paper yields a bound on sojourn times of the Markov process in subsets centered around the maxima of the fitness function. Exploitation of this result opens the way to convergence speed bounds with respect to some statistical measures on the fitness function (likely related to irregularity).},
  pdf       = {Papers/31_ppsn-198.pdf}
}

@inproceedings{Lutton-Curacao95,
  author    = {Lutton, Evelyne},
  title     = {Genetic Algorithms and the Optimisation of Fractal Functions: Deceptivity analysis},
  booktitle = {Fractal Geometry and Self-Similar Phenomena, Symposium celebrating the 70th birthday of Beno{\^\i}t Mandelbrot},
  year      = {1995},
  month     = feb,
  note      = {Cura{\c c}ao, February 2--4}
}

@inproceedings{LDM-89,
  author    = {Lutton, Evelyne and Delmas, Mathilde and Ma{\^\i}tre, Henri},
  title     = {Une base de donn{\'e}es pour la repr{\'e}sentation de sc{\`e}nes industrielles complexes d'ext{\'e}rieur},
  booktitle = {PIXIM},
  pages     = {17--31},
  year      = {1989},
  month     = sep,
  note      = {25--29 septembre}
}

@inproceedings{LuttonMaitre-89,
  author    = {Lutton, Evelyne and Ma{\^\i}tre, Henri},
  title     = {Etude des sym{\'e}tries du probl{\`e}me de perspective {\`a} trois lignes},
  booktitle = {7{\`e}me congr{\`e}s AFCET, RFIA},
  pages     = {537--546},
  year      = {1989},
  month     = nov,
  note      = {29 novembre -- 1er d{\'e}cembre}
}

@inproceedings{LMLL-90,
  author    = {Lutton, Evelyne and Ma{\^\i}tre, Henri and Lopez-Krahe, Jaime},
  title     = {Determining Vanishing Points with {Hough} Transform},
  booktitle = {Visual Communication and Image Processing'90, SPIE},
  pages     = {537--546},
  year      = {1990},
  month     = oct,
  note      = {Lausanne, Switzerland, 2--4 October}
}

@inproceedings{lutton92,
  author    = {Lutton, Evelyne and V{\'e}zien, Jean-Marc and Gagalowicz, Andr{\'e}},
  title     = {About Criteria for {3D} Reconstruction of Planar Facets from a Stereo Pair},
  booktitle = {ISCIS VII},
  year      = {1992},
  month     = nov,
  note      = {2--4 November, Antalya, Turkey}
}

@inproceedings{lutton93b,
  author    = {Lutton, Evelyne},
  title     = {{3D} Model-Based Stereo Reconstruction Using Coupled {Markov} Random Fields},
  booktitle = {International Conference CAIP'93, Computer Analysis of Images and Pattern (IAPR)},
  year      = {1993},
  month     = sep,
  note      = {September 13--15, Budapest, Hungary}
}

@inproceedings{lutton93a,
  author    = {Lutton, Evelyne and V{\'e}zien, Jean-Marc and Gagalowicz, Andr{\'e}},
  title     = {Model-Based Stereo Reconstruction by Energy Minimization},
  booktitle = {IMAGE'COM 93},
  year      = {1993},
  month     = mar,
  note      = {23--25 March, Bordeaux, France}
}

@inproceedings{Serfaty93a,
  author    = {Serfaty, V. and Ackah-Miezan, A. and Lutton, E. and Gagalowicz, A.},
  title     = {Photometric Analysis as an Aid to {3D} Reconstruction of Indoor Scenes},
  booktitle = {IS\&T/SPIE Symposium on Electronic Imaging: Science and Technology},
  year      = {1993},
  month     = jan,
  note      = {January 31 -- February 5, San Jose, California, USA}
}

@inproceedings{Serfaty93b,
  author    = {Serfaty, V. and Ackah-Miezan, A. and Lutton, E. and Gagalowicz, A.},
  title     = {Towards a Visual Model for Robot Vision: From Wireframe to Photometric Representation},
  booktitle = {IMAGE'COM 93},
  year      = {1993},
  month     = mar,
  note      = {23--25 March, Bordeaux, France}
}

@incollection{Leblanc-EA97,
  author    = {Leblanc, Beno{\^\i}t and Lutton, Evelyne and Allouche, Jean-Paul},
  title     = {Inverse problems for finite automata: a solution based on Genetic Algorithms},
  booktitle = {Artificial Evolution, European Conference, AE 97, N{\^\i}mes, France, October 1997, Selected Papers},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Verlag},
  year      = {1997},
  abstract  = {The use of heuristics such as Genetic Algorithm optimisation methods is appealing in a large range of inverse problems. The problem presented here deals with the mathematical analysis of sequences generated by finite automata. There is no known general exact method for solving the associated inverse problem. GA optimisation techniques can provide useful results, even in the very particular area of mathematical analysis. This paper presents the results we have obtained on the inverse problem for fixed point automata. Software implementation has been developed with the help of "ALGON", our home-made Genetic Algorithm software.},
  pdf       = {Papers/61_autogen.pdf}
}

@inproceedings{Leblanc-ICEC98,
  author    = {Leblanc, Beno{\^\i}t and Lutton, Evelyne},
  title     = {Bitwise regularity and {GA}-hardness},
  booktitle = {ICEC 98},
  year      = {1998},
  month     = may,
  note      = {May 5--9, Anchorage, Alaska},
  abstract  = {We present in this paper a theoretical analysis that relates an irregularity measure of a fitness function to the so-called GA-deception. This approach is a continuation of a previous work that has presented a deception analysis of Holder functions. The analysis developed here is a generalization of this work in two ways: we first use a "bitwise regularity" instead of a Holder exponent as a basis for our deception analysis, second, we perform a similar deception analysis of a GA with uniform crossover. We finally propose to use the bitwise regularity coefficients in order to analyze the influence of a chromosome encoding on the GA efficiency, and we present experiments with Gray encoding.},
  pdf       = {Papers/52_IEEEcq.pdf}
}

@inproceedings{eljlv93,
  author    = {L{\'e}vy V{\'e}hel, Jacques and Lutton, Evelyne},
  title     = {Optimization of Fractal Functions Using Genetic Algorithms},
  booktitle = {Fractal 93},
  year      = {1993},
  note      = {London},
  abstract  = {In this work, we investigate the difficult problem of the optimization of fractal functions. We first derive some relations between the local scaling exponents of the functions, the sampling rate, and the accuracy of the localization of the optimum, both in the domain and the range of the functions. We then apply these ideas to the resolution of the inverse problem for Iterated Function System (IFS) using a Genetic Algorithm. In the conditions of study (2D problem for sets), the optimization process yields the optimum with a good precision and within a tractable computing time.}
}

@inproceedings{Daoudi94,
  author    = {Daoudi, K. and Lutton, E. and L{\'e}vy V{\'e}hel, J.},
  title     = {Fractal Modeling of Speech Signals},
  booktitle = {Fractals in Engineering},
  year      = {1994},
  month     = jun,
  note      = {1--4 June, Montreal},
  abstract  = {In this paper, we present a method for speech signal analysis and synthesis based on IFS theory. We consider a speech signal and the graph of a continuous function whose irregularity, measured in terms of its local H{\"o}lder exponents, is arbitrary. We extract a few remarkable points in the signal and perform a fractal interpolation between them using a classical technique based on IFS theory. We thus obtain a functional representation of the speech signal, which is well adapted to various applications, as for instance voice interpolation.},
  pdf       = {Papers/89_Daou.pdf}
}

@inproceedings{lutton94b,
  author    = {Lutton, Evelyne and Martinez, Patrice},
  title     = {D{\'e}tection de primitives g{\'e}om{\'e}triques bidimensionnelles dans les images {\`a} l'aide d'un algorithme g{\'e}n{\'e}tique},
  booktitle = {Evolution Artificielle 94},
  year      = {1994},
  month     = sep,
  note      = {Toulouse, France, 19--23 September. English version published as a book chapter, see http link},
  abstract  = {Nous étudions l'emploi des algorithmes génétiques dans le cadre de l'extraction de primitives (segments, cercles, quadrilatères, etc ...) dans des images. Cette approche est complémentaire de l'approche classique par transformée de Hough, dans le sens où les algorithmes génétiques se révèlent efficaces là où la Transformée de Hough devient trop complexe et trop gourmande en espace mémoire, c'est-à-dire dans les cas où l'on recherche des primitives ayant plus de 3 ou 4 paramètres.

En effet, les algorithmes génétiques, employés en tant qu'outil d'optimisation stochastique, sont réputés coûteux en temps de calcul, mais se révèlent efficaces dans les cas où les fonctions à optimiser sont très irrégulières et de forte dimensionnalité. La philosophie de la méthode que nous présentons est donc très similaire à celle de la transformée de Hough, qui est de rechercher un optimum dans un espace de paramètres. Cependant, nous verrons que les implantations algorithmiques diffèrent.

Cette approche de l'extraction de primitives par algorithmes génétiques n'est pas une idée nouvelle : nous avons repris et amélioré une technique originale proposée par Roth et Levine en 1992. Nous pouvons résumer notre apport sur cette technique en trois points principaux:

* nous avons utilisé des images de distances pour "adoucir" la fonction à optimiser (aussi appelée "fitness"),

* pour détecter plusieurs primitives à la fois, nous avons implanté et amélioré une technique de partage de la population (technique de "sharing"),

* et enfin, nous avons appliqué quelques résultats théoriques récemment établis à propos des probabilités de mutations, ce qui nous a permis d'améliorer, notamment, les temps d'exécution.},
  pdf       = {Papers/86_PrimitivesEA94.pdf}
}

@inproceedings{lutton_martinez94,
  author    = {Lutton, Evelyne and Martinez, Patrice},
  title     = {A Genetic Algorithm for the Detection of {2D} Geometric Primitives in Images},
  booktitle = {12-ICPR},
  year      = {1994},
  month     = oct,
  note      = {Jerusalem, Israel, 9--13 October},
  abstract  = {We investigate the use of genetic algorithms (GAs) in the framework of image primitives extraction (such as segments, circles, ellipses or quadrilaterals). This approach completes the well-known Hough Transform, in the sense that GAs are efficient when the Hough approach becomes too expensive in memory, i.e. when we search for complex primitives having more than 3 or 4 parameters.

Indeed, a GA is a stochastic technique, relatively slow, but which provides with an efficient tool to search in a high dimensional space. The philosophy of the method is very similar to the Hough Transform, which is to search an optimum in a parameter space. However, we will see that the implementation is different.

The idea of using a GA for that purpose is not new, Roth and Levine have proposed a method for 2D and 3D primitives in 1992. For the detection of 2D primitives, we re-implement that method and improve it mainly in three ways :

* by using distance images instead of directly using contour images, which tends to smoothen the function to optimize,

* by using a GA-sharing technique, to detect several image primitives in the same step,

* by applying some recent theoretical results on GAs (about mutation probabilities) to reduce convergence time.},
  pdf       = {Papers/85_ICPR94_Final.pdf}
}

@inproceedings{Lutton95a,
  author    = {Lutton, E. and Cretin, G. and L{\'e}vy~V{\'e}hel, J. and Glevarec, P. and Roll, C.},
  title     = {Mixed {IFS}: resolution of the inverse problem using Genetic Programming},
  booktitle = {Evolution Artificielle},
  year      = {1995},
  month     = sep,
  note      = {4--6 September}
}

@inproceedings{Cretin-FE97,
  author       = {Cretin, Guillaume and Lutton, Evelyne},
  title        = {Fractal Image Compression: Experiments on {HV} Partitioning and Linear Combination of Domains},
  booktitle    = {Fractals in Engineering 97},
  year         = {1997},
  month        = jun,
  organization = {INRIA},
  note         = {Arcachon, France, June 25--27}
}

@inproceedings{IFS-GECCO99,
  author    = {Collet, Pierre and Lutton, Evelyne and Raynal, Fr{\'e}d{\'e}ric and Schoenauer, Marc},
  title     = {Individual {GP}: an alternative viewpoint for the resolution of complex problems},
  booktitle = {GECCO99, Genetic and Evolutionary Computation Conference},
  year      = {1999},
  month     = jul,
  note      = {July 13--17, Orlando, Florida, USA},
  abstract  = {An unusual GP implementation is proposed, based on a more "economic" exploitation of the GP algorithm: the "individual" approach, where each individual of the population embodies a single function rather than a set of functions. The final solution is then a set of individuals. Examples are presented where results are obtained more rapidly than with the conventional approach, where all individuals of the final generation but one are discarded.},
  pdf       = {Papers/43_GP-467.pdf}
}

@inproceedings{Polar-IFS-CEC99,
  author    = {Raynal, Fr{\'e}d{\'e}ric and Lutton, Evelyne and Collet, Pierre and Schoenauer, Marc},
  title     = {Manipulation of Non-Linear {IFS} attractors using Genetic Programming},
  booktitle = {CEC99, Congress on Evolutionary Computation},
  year      = {1999},
  month     = jul,
  note      = {July 6--9, Washington DC, USA},
  abstract  = {Non-linear Iterated Function Systems (IFSs) are very powerful mathematical objects related to fractal theory, that can be used in order to generate (or model) very irregular shapes. We investigate in this paper how Genetic Programming techniques can be efficiently exploited in order to generate randomly or interactively artistic "fractal" 2D shapes. Two applications are presented for different types of non-linear IFSs:

-- interactive generation of Mixed IFSs attractors using a classical GP scheme,

-- random generation of Polar IFSs attractors based on an "individual" approach of GP.},
  pdf       = {Papers/42_PapierCEC99-Final.pdf}
}

@inproceedings{EvJlv_EVOIASP2001,
  author    = {L{\'e}vy V{\'e}hel, Jacques and Lutton, Evelyne},
  title     = {Evolutionary signal enhancement based on {H{\"o}lder} regularity analysis},
  booktitle = {EVOIASP2001 Workshop},
  series    = {Lecture Notes in Computer Science},
  volume    = {2038},
  publisher = {Springer Verlag},
  year      = {2001},
  note      = {Como Lake, Italy},
  abstract  = {We present an approach for signal enhancement based on the analysis of the local H{\"o}lder regularity. The method does not make explicit assumptions on the type of noise or on the global smoothness of the original data, but rather supposes that signal enhancement is equivalent to increasing the H{\"o}lder regularity at each point. The problem of finding a signal with prescribed regularity that is as near as possible to the original signal does not admit a closed form solution in general. Attempts have been done previously on an analytical basis for simplified cases. We address here the general problem with the help of an evolutionary algorithm. Our method is well adapted to the case where the signal to be recovered is itself very irregular, e.g. nowhere differentiable with rapidly varying local regularity. In particular, we show an application to SAR image denoising where this technique yields good results compared to other algorithms. The implementation of the evolutionary algorithm has been done using the EASEA (EAsy specification of Evolutionary Algorithms) language.},
  pdf       = {Papers/25_DenoiseFinal.pdf}
}

@inproceedings{Hamda-CANUM2000,
  author    = {Hamda, Hatem and Jouve, Fran{\c c}ois and Lutton, Evelyne and Schoenauer, Marc and Sebag, Mich{\`e}le},
  title     = {Unstructured Representations in Evolutionary Topological Optimum Design},
  booktitle = {ESAIM, Actes du 32{\`e}me congr{\`e}s d'analyse num{\'e}rique CANUM2000},
  year      = {2000}
}

@inproceedings{Bolis2001,
  author    = {Bolis, Enzo and Zerbi, Christian and Collet, Pierre and Louchet, Jean and Lutton, Evelyne},
  title     = {A {GP} Artificial Ant for Image Processing: Preliminary Experiments with {EASEA}},
  booktitle = {EVOIASP2001},
  series    = {Lecture Notes in Computer Science},
  volume    = {2038},
  publisher = {Springer Verlag},
  pages     = {246--255},
  year      = {2001},
  note      = {Lake Como, Italy},
  abstract  = {This paper describes how animat-based "food foraging" techniques may be applied to the design of low-level image processing algorithms. First, we show how we implemented the food foraging application using the EASEA software package. We then use this technique to evolve an animat and learn how to move inside images and detect high-gradient lines with a minimum exploration time. The resulting animats do not use standard "scanning + filtering" techniques but develop other image exploration strategies close to contour tracking. Experimental results on grey level images are presented.},
  pdf       = {Papers/22_EuroGPFinal.pdf}
}

@inproceedings{LCL-oct2001,
  author    = {Lutton, E. and Collet, P. and Louchet, J.},
  title     = {{EASEA} comparisons on test functions: {GAlib} versus {EO}},
  booktitle = {EA01 Conference on Artificial Evolution},
  year      = {2001},
  month     = oct,
  abstract  = {The EASEA language (EAsy Specification of Evolutionary Algorithms) was created in order to allow scientists to concentrate on evolutionary algorithm design rather than implementation. EASEA currently supports two C++ libraries (GALib and EO) and a JAVA library for the DREAM. The aim of this paper is to assess the quality of EASEA-generated code through an extensive test procedure comparing the implementation for EO and GALib of the same test functions.},
  pdf       = {Papers/EASEAComparisonLNCS.pdf}
}

@inproceedings{LLBT-2002,
  author    = {Leblanc, Beno{\^\i}t and Toulhoat, Herv{\'e} and Braunschweig, Bertrand and Lutton, Evelyne},
  title     = {Mixing {Monte Carlo} moves more efficiently with an evolutionary algorithm},
  booktitle = {Division of Computers in Chemistry for the 23rd ACS National Meeting},
  year      = {2002},
  month     = apr,
  note      = {April 7--11. Winner of a CCG Excellence Award},
  abstract  = {When considering Markov Chain Monte Carlo sampling in the context of molecular simulations it is generally required to apply different types of Monte Carlo moves. The relative frequencies for each type of move are usually empirically chosen from ranges that appears reasonable, but rather in an arbitrary manner. Here we propose an evolutionary algorithm (population-based stochastic optimizer) that optimizes these frequencies in order to improve the sampling efficiency. We show results for NVT and NPT MC equilibrations of linear polyethylene chains in dense amorphous state, a prototypical case for which sampling efficiency is critical. Making use of problem dependent criteria, this algorithm improves the quality of simulation. Finally we also apply the same algorithm to improve the Parallel Tempering technique, in order to optimize at the same time the relative frequencies of Monte Carlo moves and the relative frequencies of swapping between sub-systems simulated at different temperatures.},
  pdf       = {Papers/9_extabsACS.pdf}
}

@inproceedings{LLBT-EA01,
  author    = {Leblanc, B. and Lutton, E. and Braunschweig, B. and Toulhoat, H.},
  title     = {History and Immortality in Evolutionary Computation},
  booktitle = {Evolution Artificielle, EA01},
  year      = {2001},
  month     = oct,
  abstract  = {When considering noisy fitness functions for some CPU-time consuming applications, a trade-off problem arises: how to reduce the influence of the noise while not increasing too much computation time. In this paper, we propose and experiment some new strategies based on an exploitation of historical information on the algorithm evolution, and a non-generational evolutionary algorithm.},
  pdf       = {Papers/18_llbt-ea01.pdf}
}

@inproceedings{LLBT-may01,
  author    = {Leblanc, B. and Lutton, E. and Braunschweig, B. and Toulhoat, H.},
  title     = {Improving molecular simulation: a meta optimisation of {Monte Carlo} parameters},
  booktitle = {CEC2001},
  year      = {2001},
  month     = may,
  note      = {May 27--30},
  abstract  = {We present a new approach to perform molecular simulations using evolutionary algorithms. The main application of this work is the simulation of dense amorphous polymers and the goal is to improve the efficiency of sampling, in other words to obtain valid samples from the phase state more rapidly. Our approach is based on parallel Markovian Monte Carlo simulations of the same physico-chemical system, where we optimise some Monte Carlo parameters by means of a real coded genetic algorithm.},
  pdf       = {Papers/23_CEC2001-354.pdf}
}

@inproceedings{Chapuis-Lutton-2001,
  author    = {Chapuis, J. and Lutton, E.},
  title     = {{ArtiE-Fract}: Interactive Evolution of Fractals},
  booktitle = {4th International Conference on Generative Art},
  year      = {2001},
  month     = dec,
  note      = {December 12--14},
  abstract  = {ArtiE-Fract is a user friendly software for the creation of fractal images. It is based on an interactive evolutionary algorithm.

Evolutionary algorithms (EA) are nowadays known as powerful stochastic optimization techniques, and can be considered as a computer implementation of a Darwinian evolution model. Their main characteristic is that they manipulate population of individuals (that represent solutions, points of a search space, programs, rules, images, signals, etc ...), and involve a set of operations (selection, mutation, crossover) applied stochastically to each individual, in order to simulate a sequence of generations.  If correctly designed, this dynamic stochastic process concentrates the population on the global optimum of the search space.

However, EA can be used for other purpose than pure optimization, for example, generation of artistic pictures. The appropriate tool is interactive EA, i.e. an EA where the function to be optimized is partly set by the user, in order to optimize something related to the "user satisfaction". ArtiE-Fract evolves a population of fractal images, and displays it via an interface.

More precisely, these fractal images are encoded in individuals as sets of contractive non-linear 2D functions (affine and non-affine), defined either in Cartesian or polar coordinates. Each set of these contractive functions define an IFS (Iterated Function System), to which a particular 2D image, its attractor, is associated.

In ArtiE-Fract the interactivity is twofold:

* the user can guide the EA by giving notations to each image of the population, via a main window that display the whole population.

* or he can directly manipulate the images via a  specialized window, and then add or replace a modified individual in the current population (this is a sort of interactive "local" optimization according to his taste). A large set of geometric, colorimetric, structural modification are available at this stage. Moreover, due to the IFS model, some control points can be defined on the attractor images (fixed points) that help to distort the shape in a convenient, but non trivial, manner.

The ArtiE-Fract interface has been carefully designed in order to give access to a wide variety of parameters. This, together with the two particularities of giving access to unusual fractal images (non-linear IFS), and allowing the user to interfere at any time with the evolutionary process, make of this software a versatile and user-friendly artistic image generation tool.},
  pdf       = {Papers/12_ArtieFract.pdf}
}

@inproceedings{LCC-EvoMUSART03,
  author    = {Lutton, Evelyne and Cayla, Emmanuel and Chapuis, Jonathan},
  title     = {{ArtiE-Fract}: The Artist's Viewpoint},
  booktitle = {EvoMUSART2003, 1st European Workshop on Evolutionary Music and Art},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Verlag},
  year      = {2003},
  month     = apr,
  note      = {April 14--16},
  abstract  = {ArtiE-Fract is an interactive evolutionary system designed for artistic exploration of the space of fractal 2D shapes. We report in this paper an experiment performed with an artist, the painter Emmanuel Cayla. The benefit of such a collaboration was twofold: first of all, the system itself has evolved in order to better fit the needs of non-computer-scientist users, and second, it has initiated an artistic approach and opened up the way to new possible design outputs.},
  pdf       = {Papers/134_lutton.pdf}
}

@inproceedings{LCL-EuroGP03,
  author    = {Landrin-Schweitzer, Yann and Collet, Pierre and Lutton, Evelyne},
  title     = {Interactive {GP} for Data Retrieval in Medical Databases},
  booktitle = {EUROGP'03},
  series    = {Lecture Notes in Computer Science},
  publisher = {Springer Verlag},
  year      = {2003},
  month     = apr,
  note      = {April 14--16},
  abstract  = {We present in this paper the design of ELISE, an interactive GP system for document retrieval tasks in very large medical databases. The components of ELISE have been tailored in order to produce a system that is capable of suggesting documents related to the query that may be of interest to the user, thanks to evolved profiling information.

Tests on the "Cystic Fibrosis Database" benchmark show that, while suggesting original documents by adaptation of its internal rules to the context of the user, ELISE is able to improve its recall rate.},
  pdf       = {Papers/133_Elise-Final-EuroGP03.pdf}
}

@inproceedings{LCLP-SAC03,
  author    = {Landrin-Schweitzer, Yann and Collet, Pierre and Lutton, Evelyne and Prost, Thierry},
  title     = {Introducing Lateral Thinking in Search Engines with Interactive Evolutionary Algorithms},
  booktitle = {Annual ACM Symposium on Applied Computing (SAC 2003), Special Track on ``Computer Applications in Health Care'' (COMPAHEC 2003)},
  year      = {2003},
  month     = mar,
  note      = {March 9--12, Melbourne, Florida, USA},
  abstract  = {Nowadays, large medical databases consist of a collection of smaller databases, each on possibly different fields and using different formats, making it increasingly difficult to retrieve valuable information among the thousands of documents retrieved by a simple query. A new Evolutionary Learning Interactive Search Engine (ELISE) feeds on previous user requests to retrieve ``alternative'' documents that may not be returned by more conventional search engines, in a way that may recall ``lateral thinking.'' Tests on the ``Cystic Fibrosis Database'' benchmark (CFD) prove that, while suggesting original documents by adaptation of its internal rules to the context of the user, ELISE is able to improve its recall rate.},
  pdf       = {Papers/130_CAHC-01.pdf}
}

@inproceedings{Semet2003a,
  author    = {Semet, Yann and Jamont, Yannick and Biojout, Raphael and Lutton, Evelyne and Collet, Pierre},
  title     = {Artificial Ant Colonies and {E-Learning}: An Optimisation of Pedagogical Paths},
  booktitle = {HCII'03 -- 10th International Conference on Human Computer Interaction},
  year      = {2003},
  month     = jun,
  note      = {Crete, Greece, June 22--27},
  abstract  = {This paper describes current research on the optimisation of the pedagogical path of a student in an existing e-learning software. This optimisation is performed following the models given by a fairly recent field of Artificial Intelligence: Ant Colony Optimisation (ACO). The underlying structure of the E-learning material is represented by a graph with valued arcs whose weights are optimised by virtual ants that release virtual pheromones along their paths. This gradual modification of the graph's structure improves its pedagogic pertinence in order to increase pedagogic success. The system is developed for Paraschool, the leading French E-learning company. Tests will be conducted on a pool of more than 10,000 users.},
  pdf       = {Papers/142_SemetHCII03.pdf}
}

@inproceedings{Semet2003b,
  author    = {Semet, Yann and Lutton, Evelyne and Collet, Pierre},
  title     = {Ant Colony Optimisation for {E-Learning}: Observing the Emergence of Pedagogic Suggestions},
  booktitle = {SIS2003, IEEE Swarm Intelligence Symposium},
  year      = {2003},
  month     = apr,
  note      = {Indianapolis, USA, April 24--26},
  abstract  = {An attempt is made to apply Ant Colony Optimization (ACO) heuristics to an E-learning problem: the pedagogic material of an online teaching website for high school students is modelled as a navigation graph where nodes are exercises or lessons and arcs are hypertext links. The arcs' valuation, representing the pedagogic structure and conditioning the website's presentation, is gradually modified through the release and evaporation of virtual pheromones that reflect the successes and failures of students roaming around the graph.

A compromise is expected to emerge between the pedagogic structure as originally dictated by professors, the collective experience of the whole pool of students and the particularities of each individual.

The purpose of this study conducted for Paraschool, the leading French e-learning company, is twofold: enhancing the website by making its presentation intelligently dynamic and providing the pedagogical team with a refined auditing tool that could help it identify the strengths and weaknesses of its pedagogic choices.},
  pdf       = {Papers/143_SemetSIS03.pdf}
}

@inproceedings{Pauplin-al2004,
  author    = {Pauplin, Olivier and Louchet, Jean and Lutton, Evelyne and Parent, Michel},
  title     = {Obstacle detection by Evolutionary Algorithm: the {Fly} Algorithm},
  booktitle = {The Second International Conference on Autonomous Robots and Agents, ICARA2004},
  year      = {2004},
  month     = dec,
  pages     = {139--140},
  note      = {Palmerston North, New Zealand, December 13--15},
  abstract  = {Artificial vision is a key element in robots autonomy. The Fly algorithm is a fast evolutionary algorithm designed for real time obstacle detection using pairs of stereo images. It aims to be used in particular in the fields of mobile robotics and automated vehicles. Based on the Parisian approach, the Fly algorithm produces a set of 3-D points which gather on the surfaces of obstacles. This paper describes the use of the Fly algorithm for obstacle detection in a real environment, and a possible use for vehicle control is presented.},
  pdf       = {Papers/162_pauplin.pdf}
}

@inproceedings{PLLP04-2,
  author    = {Pauplin, Olivier and Louchet, Jean and Lutton, Evelyne and Parent, Michel},
  title     = {Applying evolutionary optimisation to robot obstacle avoidance},
  booktitle = {ISCIIA},
  year      = {2004},
  month     = dec,
  note      = {December 20--24, 2004, Haikou, China},
  abstract  = {This paper presents an artificial evolution-based method for stereo image analysis and its application to real-time obstacle detection and avoidance for a mobile robot. It uses the Parisian approach, which consists there in splitting the representation of the robot's environment into a large number of simple primitives, the "flies", which are evolved following a biologically inspired scheme and give a fast, low-cost solution to the obstacle detection problem in mobile robotics.},
  pdf       = {Papers/171_Pauplin_ISCIIA04.pdf}
}

@inproceedings{DOLS04,
  author    = {Dunn, Enrique and Olague, Gustavo and Lutton, Evelyne and Schoenauer, Marc},
  title     = {{Pareto} Optimal Sensing Strategies for an Active Vision System},
  booktitle = {CEC, IEEE Congress on Evolutionary Computation},
  volume    = {1},
  pages     = {457--463},
  year      = {2004},
  month     = jun,
  note      = {Portland, Oregon, USA, June 19--23},
  url       = {http://cec2004.org/sessions.htm},
  abstract  = {We present a multi-objective methodology, based on evolutionary computation, for solving the sensor planning problem for an active vision system. The application of different representation schemes, that allow to consider either fixed or variable size camera networks in a single evolutionary process, is studied. Furthermore, a novel representation of the recombination and mutation operators is brought forth. The developed methodology is incorporated into a 3D simulation environment and experimental results shown. Results validate the flexibility and effectiveness of our approach and offer new research alternatives in the field of sensor planning.},
  pdf       = {Papers/167_DunnOlague.pdf}
}

@inproceedings{OFPL04,
  author    = {Olague, Gustavo and Fern{\'a}ndez, Francisco and P{\'e}rez, Cynthia and Lutton, Evelyne},
  title     = {The Infection Algorithm: an Artificial Epidemic Approach for Dense Stereo Matching},
  booktitle = {PPSN, Parallel Problem Solving from Nature},
  editor    = {Yao, X. and others},
  series    = {Lecture Notes in Computer Science},
  volume    = {3242},
  pages     = {622--632},
  publisher = {Springer-Verlag},
  year      = {2004},
  month     = sep,
  note      = {Birmingham, UK, September 18--22},
  abstract  = {We present a new bio-inspired approach applied to a problem of stereo images matching. This approach is based on an artificial epidemic process, that we call ``the infection algorithm.'' The problem at hand is a basic one in computer vision for 3D scene reconstruction. It has many complex aspects and is known as an extremely difficult one. The aim is to match the contents of two images in order to obtain 3D informations which allow the generation of simulated projections from a viewpoint that is different from the ones of the initial photographs. This process is known as view synthesis. The algorithm we propose exploits the image contents in order to only produce the necessary 3D depth information, while saving computational time. It is based on a set of distributed rules, that propagate like an artificial epidemy over the images. Experiments on a pair of real images are presented, and realistic reprojected images have been generated.}
}

@inproceedings{LLG-05,
author = {Lutton, Evelyne and Grenier, Pierre and L\'evy V\'ehel, Jacques},
title = {An Interactive Evolutionary Algorithm for Multifractal Bayesian Denoising},
booktitle = {EVOIASP},
year = {2005},
month = {30 March--1 April},
address = {Lausanne},
abstract = {We present in this paper a multifractal bayesian denoising technique based on an interactive EA. The multifractal denoising algorithm that serves as a basis for this technique is adapted to complex images and signals, and depends on a set of parameters. As the tuning of these parameters is a difficult task, highly dependent on psychovisual and subjective factors, we propose to use an interactive EA to drive this process. Comparative denoising results are presented with automatic and interactive EA optimisation. The proposed technique yield efficient denoising in many cases, comparable to classical denoising techniques. The versatility of the interactive implementation is however a major advantage to handle difficult images like IR or medical images.},
pdf = {Papers/173_Lutton.pdf}
}

@inproceedings{ColletPPSN2000,
author = {Collet, Pierre and Lutton, Evelyne and Schoenauer, Marc and Louchet, Jean},
title = {Take it \mbox{EASEA}},
booktitle = {Parallel Problem Solving from Nature - PPSN VI 6th International Conference},
editor = {Schoenauer, M. and Deb, K. and Rudolph, G. and Yao, X. and Lutton, E. and Merelo, J. J. and Schwefel, H.-P.},
year = {2000},
publisher = {Springer Verlag},
month = {September 16-20},
series = {Lecture Notes in Computer Science},
volume = {1917},
abstract = { Evolutionary algorithms are not straightforward to implement and the lack of any specialised language forces users to reinvent the wheel every time they want to write a new program. Over the last years, evolutionary libraries have appeared, trying to reduce the amount of work involved in writing such algorithms from scratch, by offering standard engines, strategies and tools. Unfortunately, most of these libraries are quite complex to use, and imply a deep knowledge of object programming and C++. To further reduce the amount of work needed to implement a new algorithm, without however throwing down the drain all the man-years already spent in the development of such libraries, we have designed EASEA (acronym for EAsy Specification of Evolutionary Algorithms): a new high-level language dedicated to the specification of evolutionary algorithms. EASEA compiles .ez files into C++ object files, containing function calls to a chosen existing library. The resulting C++ file is in turn compiled and linked with the library to produce an executable file implementing the evolutionary algorithm specified in the original .ez file.

EASEA is available on the web at: http://apis.saclay.inria.fr/},
pdf = {Papers/32_PPSNVI.pdf}
}

@inproceedings{PLEVGO06,
author = {Legrand, Pierrick and Lutton, Evelyne and Olague, Gustavo},
title = {Evolutionary denoising based on an estimation of {H{\"o}lder} exponents with oscillations},
booktitle = {EVOIASP 2006, 8th European Workshop on Evolutionary Computation in Image Analysis and Signal Processing},
year = {2006},
month = {April 10-12},
abstract = {In multifractal denoising techniques, the acuracy of the H{\"o}lder exponents estimations is crucial for the quality of the outputs. In continuity with the method described in [1], where a wavelet decomposition was used, we investigate the use of another H{\"o}lder exponent estimation technique, based on the analysis of the local "oscillations" of the signal. The associated inverse problem to be solved, i.e. finding the signal which is the closest to the initial noisy one but having the prescribed regularity, is then more complex. Moreover, the associated search space is of a different nature as in [1], which necessitates the design of ad-hoc genetic operators.},
pdf = {Papers/191_deb_gen_5pages5.pdf}
}

@inproceedings{Trujillo-BMVC2007,
author = {Trujillo, Leonardo and Olague, Gustavo and Fernandez, Francisco and Lutton, Evelyne},
title = {Evolutionary feature selection for {Bayesian} object recognition, novel object detection and object saliency estimation using {GMMs}},
booktitle = {The British Machine Vision Conference},
address = {University of Warwick, UK},
month = {September 10-13},
year = {2007},
abstract = {This paper presents a method for object recognition, novel object detection, and estimation of the most salient object within a set. Objects are sampled using a scale invariant region detector, and each region is characterized by the subset of texture and color descriptors selected by a Genetic Algorithm (GA). Using multiple views of an object, and multiple regions per view, objects are modeled using mixture of Gaussians, where each object O is a possible class for a given image region. Given a set of objects N, the GA learns a corresponding Gaussian Mixture Model (GMM) for each object in the set employing a one vs. all training scheme. Thence, given an input image where interest regions are detected, if a large majority of the regions are classified as regions of object O, then it is assumed that said object appears within the imaged scene. The GA's fitness promotes: 1) a high classification accuracy, 2) the selection of a minimal subset of descriptors, and 3) a high separation among models. The separation between two GMMs is computed using a weighted version of Fisher's linear discriminant, which is also used to estimate the most},
pdf = {Papers/TrujilloLeonardo_OlagueGustavo_FernandezFrancisco_LuttonEvelyne_08-02-2008_trujillo_BMVC.pdf}
}

@inproceedings{Trujillo-EvoIASP2008,
author = {Trujillo, Leonardo and Olague, Gustavo and Fernandez, Francisco and Lutton, Evelyne},
title = {Selecting local region descriptors with a genetic algorithm for real-world place recognition},
booktitle = {Tenth European Workshop on Evolutionary Computation in Image Analysis and Signal Processing, EvoIASP2008},
address = {Napoli, Italy},
month = {26-28 March},
year = {2008},
abstract = {The basic problem for a mobile vision system is determining where it is located within the world. In this paper, a recognition system is presented that is capable of identifying known places such as rooms and corridors. The system relies on a bag of features approach using locally prominent image regions. Real-world locations are modeled using a mixture of Gaussian representations, thus allowing for a multimodal scene characterization. Local regions are represented by a set of 108 statistical descriptors computed from different modes of information. Therefore, the system needs to determine which subset of descriptors capture regularities between image regions of the same location, and also discriminates between regions of different places. A genetic algorithm is used to solve this selection task, using a fitness measure that promotes: 1) a high classification accuracy; 2) the selection of a minimal subset of descriptors; and 3) a high separation among place models. The approach is tested on two real world examples: a) using a sequence of still images with 4 different locations; and b) a sequence that contains 8 different locations. Results confirm the ability of the system to identify previously seen places in a real-world setting.},
pdf = {Papers/TrujilloLeonardo_OlagueGustavo_FernandezFrancisco_LuttonEvelyne_19-02-2008_EvoIASP2008.pdf}
}

@inproceedings{Trujillo-EvoHOT2008,
author = {Trujillo, Leonardo and Olague, Gustavo and Lutton, Evelyne and Fernandez, Francisco},
title = {Discovering several robot behaviors through speciation},
booktitle = {Fourth European Workshop on Bio-Inspired Heuristics for Design Automation, EvoHOT2008},
address = {Napoli, Italy},
month = {26-28 March},
year = {2008},
abstract = {This contribution studies speciation from the standpoint of evolutionary robotics (ER). In ER, the sensory-motor mappings that control an autonomous agent are designed using a neuro-evolutionary framework. An extension to this process is presented here, where speciation is incorporated to the evolution process in order to obtain a varied set of solutions for the same robotics problem using a single algorithmic run. Although speciation is common in evolutionary computation, it has been less explored in behavior-based robotics. When employed, speciation usually relies on a distance measure that allows different individuals to be compared. The distance measure is normally computed in objective or phenotypic space. However, the speciation process presented here is intended to produce several distinct robot behaviors; hence, speciation is sought in behavioral space. Thence, individuals' neurocontrollers are described using behavior signatures, which represent the traversed path of the robot within the training environment and are encoded using a character string. With this representation, behavior signatures are compared using the normalized Levenshtein distance metric (N-GLD). Results indicate that speciation in behavioral space does indeed allow the ER system to obtain several navigation strategies for a common experimental setup. This is illustrated by comparing the best individual of each species with those obtained using the Neuro-Evolution of Augmenting Topologies (NEAT) method that speciates neural networks in topological space.},
pdf = {Papers/TrujilloLeonardo_OlagueGustavo_LuttonEvelyne_FernandezFrancisco_19-02-2008_EvoHOT2008.pdf}
}

@inproceedings{Aichour-NICSO2007,
author = {Aichour, Malek and Lutton, Evelyne},
title = {Cooperative co-evolution inspired operators for classical {GP} schemes},
booktitle = {International Workshop on Nature Inspired Cooperative Strategies for Optimisation},
series = {Studies in Computational Intelligence},
address = {Acireale, Sicily, Italy},
year = {2007},
abstract = {This work is a first step toward the design of a cooperative-coevolution GP for symbolic regression, which first output is a selective mutation operator for classical GP. Cooperative co-evolution techniques rely on the imitation of cooperative capabilities of natural populations and have been successfully applied in various domains to solve very complex optimization problems. It has been proved on several applications that the use of two fitness measures (local and global) within an evolving population allow to design more efficient optimization schemes. We currently investigate the use of a two-level fitness measurement for the design of operators, and present in this paper a selective mutation operator. Experimental analysis on a symbolic regression problem give evidence of the efficiency of this operator in comparison to classical subtree mutation.},
pdf = {Papers/226_NICSO2007-006.pdf}
}

@inproceedings{Trujillo-GECCO2008-a,
author = {Trujillo, Leonardo and Olague, Gustavo and Lutton, Evelyne and Fernandez, Francisco},
title = {Multi-Objective design of operators that detect points of interest in images},
booktitle = {The Genetic and Evolutionary Computation Conference, GECCO},
address = {Atlanta, Georgia, USA},
month = {July 12-16},
year = {2008},
abstract = {In this paper, a multiobjective (MO) learning approach to image feature extraction is described, where Pareto-optimal interest point (IP) detectors are synthesized using genetic programming (GP). IPs are image pixels that are unique, robust to changes during image acquisition, and convey highly descriptive information. Detecting such features is ubiquitous to many vision applications, e.g. object recognition, image indexing, stereo vision, and content based image retrieval. In this work, candidate IP operators are automatically synthesized by the GP process using simple image operations and arithmetic functions. Three experimental optimization criteria are considered: 1) the repeatability rate; 2) the amount of global separability between IPs; and 3) the information content captured by the set of detected IPs. The MO-GP search considers Pareto dominance relations between candidate operators, a perspective that has not been contemplated in previous research devoted to this problem. The experimental results suggest that IP detection is an ill-posed problem for which a single globally optimum solution does not exist. We conclude that the evolved operators outperform and dominate, in the Pareto sense, all previously man-made designs.},
pdf = {Papers/TrujilloLeonardo_OlagueGustavo_LuttonEvelyne_FernandezFrancisco_01-07-2008_t12pap522-trujillo.pdf}
}

@inproceedings{Trujillo-GECCO2008-b,
author = {Trujillo, Leonardo and Olague, Gustavo and Lutton, Evelyne and Fernandez, Francisco},
title = {Behavior-based speciation for evolutionary robotics},
booktitle = {The Genetic and Evolutionary Computation Conference, GECCO},
address = {Atlanta, Georgia, USA},
month = {July 12-16},
year = {2008},
abstract = {This paper describes a speciation method that allows an evolutionary process to learn several robot behaviors using a single execution. Species are created in behavioral space in order to promote the discovery of different strategies that can solve the same navigation problem. Candidate neurocontrollers are grouped into species based on their corresponding behavior signature, which represents the traversed path of the robot within the environment. Behavior signatures are encoded using character strings and are compared using the string edit distance. The proposed approach is better suited for an evolutionary robotics problem than speciating in objective or topological space. Experimental comparison with the NEAT method confirms the usefulness of the proposal.},
pdf = {Papers/TrujilloLeonardo_OlagueGustavo_LuttonEvelyne_FernandezFrancisco_01-07-2008_t02pap547-trujillo.pdf}
}

@inproceedings{Barriere-NICSO2008,
author = {Barriere, Olivier and Lutton, Evelyne},
title = {Experimental analysis of a variable size mono-population cooperative-coevolution strategy},
booktitle = {NICSO 2008, International Workshop on Nature Inspired Cooperative Strategies for Optimization},
address = {Puerto de La Cruz, Tenerife},
month = {12-14 November},
year = {2008},
abstract = {Cooperative coevolution strategies have been used with success to solve complex problems in various application domains. These techniques rely on a formulation of the problem to be solved as a cooperative task, where individuals collaborate or compete in order to collectively build a solution. Several strategies have been developed depending on the way the problem is shared into interdependent subproblems and the way coevolution occur (multipopulation versus monopopulation schemes). Here, we deal with a mono-population strategy (Parisian approach) applied to a problem related to the modeling of a cheese ripening process (french Camembert). A variable sized population Parisian GP strategy has been experimented, using adaptive deflating and inflating schemes for the population size. Experimental results show the effectiveness of the approach on real data collected on a laboratory cheese ripening production line.},
pdf = {Papers/BarriereOlivier_LuttonEvelyne_24-10-2008_INCALIN-NICSO2008.pdf}
}

@inproceedings{Barriere-PPSN2008,
author = {Barriere, Olivier and Lutton, Evelyne and Baudrit, Cedric and Sicard, Mariette and Pinaud, Bruno and Perrot, Nathalie},
title = {Modeling human expertise on a cheese ripening industrial process using {GP}},
booktitle = {PPSN, 10th International Conference on Parallel Problem Solving from Nature},
address = {Technische Universität Dortmund, Germany},
month = {September 13-17},
year = {2008},
abstract = {Industrial agrifood processes often strongly rely on human expertise, expressed as know-how and control procedures based on subjective measurements (color, smell, texture), which are very difficult to capture and model. We deal in this paper with a cheese ripening process (of french Camembert), for which experimental data have been collected within a cheese ripening laboratory chain. A global and a monopopulation cooperative/coevolutive GP scheme (Parisian approach) have been developed in order to simulate phase prediction (i.e. a subjective estimation of human experts) from microbial proportions and Ph measurements. These two GP approaches are compared to Bayesian network modeling and simple multilinear learning algorithms. Preliminary results show the effectiveness and robustness of the Parisian GP approach.},
pdf = {Papers/BarriereOlivier_LuttonEvelyne_BaudritCedric_SicardMariette_PinaudBruno_PerrotNathalie_18-06-2008_INCALIN-PPSN2008-Final.pdf}
}

@inproceedings{JLVFMEL,
author = {L\'{e}vy V\'{e}hel, J. and Mendivil, F. and Lutton, E.},
title = {Overcompressing {JPEG} images},
booktitle = {EvoIASP2007, 9th European Workshop on Evolutionary Computation in Image Analysis and Signal Processing},
address = {Valencia, Spain},
month = {April 11-13},
publisher = {Springer Verlag},
year = {2007},
abstract = {Overcompression is the process of post-processing compressed images to gain either further size reduction or improved quality. This is made possible by the fact that the set of all "reasonable" images has a sparse structure. In this work, we apply this idea to the overcompression of JPEG images: We reduce the blocking artifacts commonly seen in JPEG images by allowing the low frequency coefficients of the DCT to vary slightly. Evolutionary strategies are used in order to guide the modification of the coefficients towards a smoother image.},
pdf = {Papers/214_LevyVehel.pdf}
}

@inproceedings{ELJLV2006,
author = {Lutton, E. and L\'{e}vy V\'{e}hel, J.},
title = {Pointwise Regularity of Fitness Landscapes and the Performance of a Simple {ES}},
booktitle = {CEC'06},
year = {2006},
month = {July, 16-21},
abstract = {We present a theoretical and experimental analysis of the influence of the pointwise irregularity of the fitness function on the behavior of an (1+1)ES. Previous work on this subject suggests that the performance of an EA strongly depends on the irregularity of the fitness function. Several irregularity measures have been derived for discrete search spaces, in order to numerically characterize this type of difficulty for EA. These characterizations are mainly based on H{\"o}lder exponents. Previous studies used however a global characterization of fitness regularity (the global H{\"o}lder exponent), with experimental validations being conducted on test functions with uniform regularity. This is extended here in two ways: Results are now stated for continuous search spaces, and pointwise instead of global irregularity is considered. In addition, we present a way to modify the genetic topology to accommodate for variable regularity: The mutation radius, which controls the size of the neighbourhood of a point, is allowed to vary according to the pointwise irregularity of the fitness function. These results are explained through a simple theoretical analysis which gives a relation between the pointwise H{\"o}lder exponent and the optimal mutation radius. Several questions connected to on-line measurements and usage of regularity in EAs are raised.},
pdf = {Papers/211_LL-CEC2006.pdf}
}

@inproceedings{GVELPC06,
author = {Valigiani, G. and Lutton, E. and Collet, P.},
title = {Adapting the {Elo} rating},
booktitle = {13th ISPE International Conference on Concurrent Engineering, CE'06},
year = {2006},
month = {September 18-22},
abstract = {Paraschool (the French leading e-learning company, with more than 250,000 registered students), wanted an intelligent software to guide students in their graph of pedagogic items. The very large number of students suggested to use students as artificial ants, leaving stigmergic information on the web-site graph to optimise pedagogical paths. The differences between artificial ants and students led to describe a new concurrent paradigm called "man-hill optimization," where optimization emerges from the behaviour of humans exploring a web site.

At this stage, the need of rating pedagogical items showed up in order to direct students towards items adapted to their level. A solution was found in the ELO automatic rating process, that also provides (as a side-effect) a powerful audit system that can track syntactic and semantic problems in exercises. For an effective use, this paper shows how the ELO rating process has been modified to overcome the Deflation problem.},
pdf = {Papers/208_paper_ce2006_Valigiani_Lutton_Collet.pdf}
}

@inproceedings{Jamont-al05,
  author    = {Valigiani, Gregory and Jamont, Yannick and Bourgeois-Republique, C. and Biojout, Raphael and Lutton, Evelyne and Collet, Pierre},
  title     = {Experimenting with a Real-Size Man-Hill to Optimize Pedagogical Paths},
  booktitle = {20th ACM symposium on  Applied Computing, SAC'05, Bioinformatics track},
  address   = {Santa Fe, New Mexico, USA},
  month     = {March, 13-17},
  year      = {2005},
  abstract  = { This paper describes experiments aimed at adapting Ant Colony Optimization (ACO) techniques to an e-learning environment, thanks to the fact that the available on-line material can be organized in a graph by means of hyperlinks between educational topics. The structure of this graph is to be optimized in order to facilitate the learning process for students.

ACO is based on an ant-hill metaphor. In this case, however, the agents that move on the graph are students who unconsciously leave pheromones in the environment depending on their success or failure. In the paper, the whole process is therefore referred to as a "man-hill."

Compared to the [13, 14] papers that were providing guidelines for this problem, real-size tests have been performed, showing that man-hills behave differently from ant-hills. The notion of pheromone erosion (rather than evaporation) is introduced.},
  pdf       = {Papers/166_sac.pdf}
}

@inproceedings{Perez-al05,
  author    = {Perez, Cynthia and Olague, Gustavo and Fernandez, Francisco and Lutton, Evelyne},
  title     = {An Evolutionary Infection Algorithm for Dense Stereo Correspondence},
  booktitle = {EvoIASP 2005},
  month     = {30 March--1 April},
  year      = {2005},
  abstract  = {This work presents an evolutionary approach to improve the infection algorithm to solve the problem of dense stereo matching. Dense stereo matching is used for 3D reconstruction in stereo vision in order to achieve fine texture detail about a scene. The algorithm presented in this paper incorporates two different epidemic automata applied to the correspondence of two images. These two epidemic automata provide two different behaviours which construct a different matching. Our aim is to provide with a new strategy inspired on evolutionary computation, which combines the behaviours of both automata into a single correspondence process. The new algorithm will decide which epidemic automata to use based on inheritance and mutation, as well as the attributes, texture and geometry, of the input images. Finally, we show experiments in a real stereo pair to show how the new algorithm works.},
  pdf       = {Papers/168_PerezEvoiasp2005.pdf}
}

@inproceedings{Dunn-EuroGP2005,
author = {Dunn, Enrique and Olague, Gustavo and Lutton, Evelyne},
title = {Automated Photogrammetric Network Design using the {Parisian} Approach},
booktitle = {EvoIASP 2005},
address = {Lausanne},
month = {30 March--1 April},
year = {2005},
note = {Nominated for the best paper award},
abstract = {We present a novel camera network design methodology based on the Parisian approach to evolutionary computation. The problem is partitioned into a set of homogeneous elements, whose individual contribution to the problem solution can be evaluated separately. These elements are allocated in a population with the goal of creating a single solution by a process of aggregation. Thus, the goal of the evolutionary process is to generate individuals that jointly form better solutions. Under the proposed paradigm, aspects such as problem decomposition and representation, as well as local and global fitness integration need to be addressed. Experimental results illustrate significant improvements, in terms of solution quality and computational cost, when compared to canonical evolutionary approaches.},
pdf = {Papers/172_Dunn.pdf}
}

@inproceedings{Lutton-Pilz-JLV05b,
  author       = {Lutton, Evelyne and Pilz, Mario and L\'evy V\'ehel, Jacques},
  title        = {The Fitness Map Scheme. Application to interactive multifractal image denoising},
  booktitle    = {CEC2005},
  organization = {IEEE Congress on Evolutionary Computation},
  month        = {September, 2-5},
  year         = {2005},
  abstract     = {Interactive evolutionary algorithms (IEA) often suffer from what is called the "user bottleneck." In this paper, we propose and analyse a method to limit the user interactions, while still providing sufficient informations for the EA to converge. The method has been currently developed on a multifractal image denoising application: a multifractal denoising method is adapted to complex images, but depends on a set of parameters that are quite difficult to tune by hand. A simple IEA has been developed for this purpose in a previous work. We now experiment an approximation of the user judgment, via a "fitness map", that helps to reduce the number of user-interactions. The method is easily extensible to other interactive, or computationally expensive, evolutionary schemes.},
  pdf          = {Papers/175_CEC_Denoise_IEA_Final.pdf}
}

@inproceedings{LLG-Gretsi2005,
  author    = {Lutton, Evelyne and L\'evy V\'ehel, Jacques and Grenier, Pierre},
  title     = {Débruitage multifractal par évolution interactive},
  booktitle = {GRETSI},
  year      = {2005},
  abstract  = {Nous présentons dans ce papier une méthode interactive de débruitage fondée sur une technique de débruitage multifractal bayésien adaptée aux signaux complexes. Cette technique nécessite le réglage d'un jeu de paramètres, et le résultat dépend fortement de facteurs psychovisuels et subjectifs. L'originalité de l'approche réside dans l'emploi d'un algorithme évolutionnaire interactif pour gérer l'ajustement des paramètres. Nous présentons des résultats comparatifs de débruitage, qui prouvent l'efficacité et la flexibilité de la méthode.

We present in this paper a multifractal bayesian denoising technique based on an interactive EA. The multifractal denoising algorithm that serves as a basis for this technique is adapted to complex images and signals, and depends on a set of parameters. As the tuning of these parameters is a difficult task, highly dependent on psychovisual and subjective factors, an interactive EA has been used to drive this process. Comparative denoising results are presented. The proposed technique yield efficient denoising in many cases, comparable to classical denoising techniques. The versatility of the interactive implementation is however a major advantage to handle difficult images like IR or medical images.},
  pdf       = {Papers/165_MFD-Gretsi05-final.pdf}
}

@inproceedings{Valigiani-al05,
  author    = {Valigiani, Gregory and Lutton, Evelyne and Jamont, Yannick and Biojout, Raphael and Collet, Pierre},
  title     = {Evaluating a Real-Size Man-Hill},
  booktitle = {ECE'WSEAS'05},
  month     = {November 17-19},
  year      = {2005},
  abstract  = {"Man-hill" optimisation (a slightly different form of Ant Colony Optimisation) has been applied to the e-learning software of Paraschool (French e-learning company): instead of implementing artificial ants, students visiting the site unknowingly leave stigmergic information on the Paraschool web-site graph, in order to promote the emergence of pedagogic paths. In order to present students with exercises that match their level, it was needed to find some kind of evaluation mechanism, both for the student and for the Paraschool items. A solution was found in the Elo automatic rating process, that also provides as a side-effect a powerful audit system that can track semantic problems in exercises.},
  pdf       = {Papers/188_501-353.pdf}
}

@inproceedings{OchoaLutton2007,
author = {Ochoa, Gabriela and Lutton, Evelyne and Burke, Edmund K.},
title = {Cooperative Royal Road Functions},
booktitle = {Evolution Artificielle},
address = {Tours, France},
month = {October 29-31},
year = {2007},
abstract = {We propose using the so called Royal Road functions as test functions for cooperative co-evolutionary algorithms (CCEAs). The Royal Road functions were created in the early 90's with the aim of demonstrating the superiority of GAs over local search methods. Unexpectedly, the opposite was found true, but this research conducted to understanding the phenomenon of hitchhiking whereby unfavorable alleles may become established in the population following an early association with an instance of a highly fit schema. Here, we take advantage of the modular and hierarchical structure of the Royal Road functions to adapt them to the co-evolutionary setting. Using a multiple population approach, we show that a CCEA easily outperforms a standard GA on the Royal Road functions, by naturally overcoming the hitchhiking effect. Moreover, we found that the optimal number of sub-populations for the CCEA is not the same as the number of components the function can be linearly separated, and propose an explanation for this behavior. We argue that this class of functions may serve in foundational studies of cooperative co-evolution.},
pdf = {Papers/225_ParisCoop.pdf}
}

@inproceedings{ColletLuttonLouchet2002,
author = {Collet, Pierre and Lutton, Evelyne and Louchet, Jean},
title = {Issues on the Optimisation of Evolutionary Algorithm Code},
booktitle = {CEC2002 conference on Evolutionary Computation},
address = {Honolulu},
year = {2002},
month = {May},
abstract = {The aim of this paper is to show that the common belief, in the evolutionary community, that evaluation time usually takes over 90 percent of the total time, is far from being always true. In fact, many real-world applications showed a much lower percentage. This raises several questions, one of them being the balance between fitness and operators computational complexity: what is the use of elaborating smart evolutionary operators to reduce the number of evaluations if as a result, the total computation time is increased ?},
pdf = {Papers/2_cec-Pierre.pdf}
}

@inproceedings{BLW-GECCO2009,
author = {Barrière, Olivier and Lutton, Evelyne and Wuillemin, Pierre-Henri},
title = {Bayesian Network Structure learning using Cooperative Coevolution},
booktitle = {Genetic and Evolutionary Computation Conference (GECCO 2009)},
year = {2009},
abstract = {We propose a cooperative-coevolution -- Parisian trend -- algorithm, IMPEA (Independence Model based Parisian EA), to the problem of Bayesian networks structure estimation. It is based on an intermediate stage which consists of evaluating an independence model of the data to be modelled. The Parisian cooperative coevolution is particularly well suited to the structure of this intermediate problem, and allows to represent an independence model with help of a whole population, each individual being an independence statement, i.e. a component of the independence model. Once an independence model is estimated, a Bayesian network can be built. This two level resolution of the complex problem of Bayesian network structure estimation has the major advantage to avoid the difficult problem of direct acyclic graph representation within an evolutionary algorithm, which causes many troubles related to constraints handling and slows down algorithms. Comparative results with a deterministic algorithm, PC, on two test cases (including the Insurance BN benchmark), prove the efficiency of IMPEA, which provides better results than PC in a comparable computation time, and which is able to tackle more complex issues than PC.},
pdf = {Papers/BarriereOlivier_LuttonEvelyne_WuilleminPierre-Henri_09-06-2009_t09fp625-barriere.pdf}
}

@inproceedings{SLL-ICEC2009,
author = {Sapin, Emmanuel and Louchet, Jean and Lutton, Evelyne},
title = {The Fly Algorithm revisited: Adaptation to {CMOS} image sensor},
booktitle = {ICEC 2009, International Conference on Evolutionary Computation},
year = {2009},
month = {October, 5-7},
abstract = {Cooperative coevolution algorithms (CCEAs) usually represent a searched solution as an aggregation of several individuals (or even as a whole population). In other terms, each individual only bears a part of the searched solution. This scheme allows to use the artificial Darwinism principles in a more economic way, and the gain in terms of robustness and efficiency is important. In the computer vision domain, this scheme has been applied to stereovision, to produce an algorithm (the fly algorithm) with asynchronism property. However, this property has not yet been fully exploited, in particular at the sensor level, where CMOS technology opens perspectives to faster reactions. We describe in this paper a new coevolution engine that allow the Fly Algorithm to better exploit the properties of CMOS image sensors.},
pdf = {Papers/SapinEmmanuel_LouchetJean_LuttonEvelyne_05-07-2009_paper.pdf}
}

@inproceedings{EA2009,
author = {Vidal, Franck P. and Lazaro-Ponthus, Delphine and Legoupil, Samuel and Louchet, Jean and Lutton, Evelyne and Rocchisani, Jean-Marie},
title = {Artificial Evolution for {3D} {PET} Reconstruction},
booktitle = {Proceedings of the 9th International Conference on Artificial Evolution (EA'09)},
year = 2009,
month = oct,
abstract = {This paper presents a method to take advantage of
artificial evolution in positron emission tomography reconstruction.
This imaging technique produces datasets that correspond to the
concentration of positron emitters through the patient.
Fully 3D tomographic reconstruction requires high computing power and
leads to many challenges.
Our aim is to reduce the computing cost and produce datasets while
retaining the required quality.
Our method is based on a coevolution strategy (also called Parisian
evolution) named the ``fly algorithm''.
Each fly represents a point of the space and acts as a positron emitter.
The final population of flies corresponds to the reconstructed data.
Using ``marginal evaluation'', the fly's fitness is the positive or
negative contribution of this fly to the performance of the population.
This is also used to skip the relatively costly step of selection and
simplify the evolutionary algorithm.},
pdf = {Papers/Vidal2009EA.pdf}
}

@inproceedings{MIC2009,
author = {Vidal, Franck P. and Lazaro-Ponthus, Delphine and Legoupil, Samuel and Louchet, Jean and Lutton, Evelyne and Rocchisani, Jean-Marie},
title = {{PET} Reconstruction Using a Cooperative Coevolution Strategy},
booktitle = {Proceedings of the IEEE Medical Imaging Conference 2009},
year = 2009,
month = oct,
organization = {IEEE},
abstract = {Fully 3D tomographic reconstruction in nuclear medicine
requires high computing power and leads to many challenges.
The trend today is to use more general methods that can integrate more
realistic models (application-specific physics and data acquisition
system geometry).
To date, the use of such methods is still restricted due to the heavy
computing power needed.
Evolutionary algorithms have proven to be efficient optimisation
techniques in various domains, including medicine and medical imaging.
However the use of evolutionary computation in tomographic
reconstruction has been largely overlooked.
In previous work, we showed that an artificial coevolution strategy
(also called ``Parisian evolution'') based on the ``fly algorithm''
can be used to reconstruct the 3D distribution of radioactive emitters
in Single Photon Emission Computed Tomography (SPECT).
In this abstract, we propose a computer-based algorithm for fully 3D
reconstruction in Positron Emission Tomography (PET) based on the same
approach and evaluate its relevance.
Realistic models describing the physics of PET could be integrated in
the reconstruction loop while taking advantage of artificial evolution
to reduce the computing time.},
pdf = {Papers/Vidal2009IEEE-NSS-MIC-abstract.pdf}
}

@inproceedings{Vidal2009MIC,
author = {Vidal, Franck P. and Louchet, Jean and Lutton, Evelyne and Rocchisani, Jean-Marie},
title = {{PET} Reconstruction Using a Cooperative Coevolution Strategy in {LOR} Space},
booktitle = {IEEE Nuclear Science Symposium Conference Record},
year = {2009},
month = oct,
publisher = {IEEE},
pages = {3363--3366},
abstract = {This paper presents preliminary results of a novel method that takes
advantage of artificial evolution for positron emission tomography
(PET) reconstruction. Fully 3D tomographic reconstruction in PET
requires high computing power and leads to many challenges. To date,
the use of such methods is still restricted due to the heavy computing
power needed. Evolutionary algorithms have proven to be efficient
optimisation techniques in various domains. However the use of evolutionary
computation in tomographic reconstruction has been largely overlooked.
We propose a computer-based algorithm for fully 3D reconstruction
in PET based on artificial evolution and evaluate its relevance.},
annotation = {Oct~25--31, 2009},
keywords = {Positron emission tomography, genetic algorithms, optimization methods},
pdf = {Papers/Vidal2009MIC.pdf}
}

@inproceedings{Vidal2010EvoIASP,
author = {Vidal, Franck P. and Louchet, Jean and Rocchisani, Jean-Marie and Lutton, Evelyne},
title = {New genetic operators in the Fly algorithm: application to medical {PET} image reconstruction},
booktitle = {Evolutionary Computation in Image Analysis and Signal Processing, EvoApplications 2010, Part I, LNCS 6024, C. Di Chio et al. (Eds.)},
year = {2010},
month = apr,
publisher = {Springer},
abstract = {Our reconstruction method is based on a cooperative
coevolution strategy (also called Parisian evolution): the ``fly
algorithm''.  Each fly is a 3D point that mimics a positron emitter.
The flies' position is progressively optimised using evolutionary
computing to closely match the data measured by the imaging system.
The performance of each fly is assessed using a ``marginal
evaluation'' based on the positive or negative contribution of this
fly to the performance of the population.  Using this property, we
propose a ``thresholded-selection'' method to replace the classical
tournament method.  A mitosis operator is also proposed. It is
triggered to automatically increase the population size when the
number of flies with negative fitness becomes too low.},
note = {7th - 9th April, Istanbul Technical University, Istanbul, Turkey},
keywords = {Positron emission tomography, genetic algorithms, optimization methods, fly algorithm},
pdf = {Papers/Vidal2010EvoIASP.pdf}
}

@inproceedings{Kaufmann2010EvoIASP,
author = {Kaufmann, Benoit and Louchet, Jean and Lutton, Evelyne},
title = {Hand posture recognition using real-time artificial evolution},
booktitle = {Evolutionary Computation in Image Analysis and Signal Processing, EvoApplications 2010, Part I, LNCS 6024, C. Di Chio et al. (Eds.)},
year = {2010},
month = apr,
publisher = {Springer},
pages = {251--260},
abstract = {In this paper, we present a hand posture recognition system (configuration
and position) we designed as part of a gestural man-machine interface.
After a simple image preprocessing, the parameter space (corresponding
to the configuration and spatial position of the user's hand) is
directly explored using a population of points evolved via an
Evolution Strategy.  Giving the priority to exploring the parameter
space rather than the image, is an alternative to the classical
generalisation of the Hough Transform and allows to meet the real-time
constraints of the project. The application is an Augmented Reality
prototype for a long term exhibition at the Cit\'e des Sciences,
Paris. As it will be open to the general public, rather than using
conventional peripherals like a mouse or a joystick, a more natural
interface has been chosen, using a microcamera embedded into virtual
reality goggles in order to exploit the images of the user's hand as
input data and enable the user to manipulate virtual objects without
any specific training.},
note = {7th - 9th April, Istanbul Technical University, Istanbul, Turkey},
pdf = {Papers/KaufmannEvoIASP2010.pdf}
}

@inproceedings{Vidal2010AAPM-2,
author = {Vidal, Franck P. and Louchet, Jean and Rocchisani, Jean-Marie and Lutton, Evelyne},
title = {Artificial Evolution for {PET} and {SPECT} reconstruction},
booktitle = {AAPM Annual Meeting},
year = {2010},
month = jul,
annotation = {Jul~18--22, 2010},
pdfkeywords = {Single-Photon Emission Computed Tomography, SPECT, Positron Emission Tomography, PET, tomography, reconstruction, artificial evolution, fly algorithm},
abstract = {Purpose: We propose an evolutionary approach for image
reconstruction in nuclear medicine.  Our method is based on a
cooperative coevolution strategy (also called Parisian evolution): the
``fly algorithm''. Method and Materials: Each individual, or fly,
corresponds to a 3D point that mimics a radioactive emitter, i.e. a
stochastic simulation of annihilation events is performed to compute
the fly's illumination pattern.  For each annihilation, a
photon is emitted in a random direction, and a second photon is
emitted in the opposite direction.  The line between two detected
photons is called line of response (LOR). If both photons are detected
by the scanner, the fly's illumination pattern is
updated. The LORs of every fly are aggregated to form the population
total illumination pattern. Using genetic operations to optimize the
position of positrons, the population of flies evolves so that the
population total pattern matches measured data.  The final population
of flies approximates the radioactivity concentration. Results: We
have developed numerical phantom models to assess the reconstruction
algorithm. To date, no scattering and no tissue attenuation have been
considered. Whilst this is not physically correct, it allows us to
test and validate our approach in the simplest cases.  Preliminary
results show the validity of this approach in both 2D and fully-3D
modes. In particular, the size of objects, and their relative
concentrations can be retrieved in the 2D mode.  In fully-3D, complex
shapes can be reconstructed. Conclusions: An evolutionary approach for
PET reconstruction has been proposed and validated using simple test
cases. Further work will therefore include the use of more realistic
input data (including random events and scattering), which will
finally lead to implement the correction of scattering within our
algorithm. A comparison study against ML-EM and/or OS-EM methods will
also need to be conducted.},
pdf = {Papers/PosterVidalAAPM-2010.pdf}
}

@inproceedings{VIDAL-PPSN2010,
author = {Vidal, Franck P. and Lutton, Evelyne and Louchet, Jean and Rocchisani, Jean-Marie},
title = {Threshold selection, mitosis and dual mutation in cooperative
co-evolution: application to medical {3D} tomography},
booktitle = {PPSN 2010, 11th International Conference on
Parallel Problem Solving From Nature},
year = {2010},
month = sep,
publisher = {Springer-Verlag},
note = {Krakow, Poland},
abstract = {We present and analyse the behaviour of specialised
operators designed for cooperative coevolution strategy in the
framework of 3D tomographic PET reconstruction. The basis is a simple
cooperative co-evolution scheme (the ``fly algorithm''), which embeds
the searched solution in the whole population, letting each individual
be only a part of the solution. An individual, or fly, is a 3D point
that emits positrons. Using a cooperative co-evolution scheme to
optimize the position of positrons, the population of flies evolves so
that the data estimated from flies matches measured data. The final
population approximates the radioactivity concentration. In this
paper, three operators are proposed, threshold selection, mitosis and
dual mutation, and their impact on the algorithm efficiency is
experimentally analysed on a controlled test-case. Their extension to
other cooperative co-evolution schemes is discussed.},
pdf = {Papers/Vidal2010PPSN.pdf}
}

@inproceedings{Mesmoudi-ICEC2010,
author = {Mesmoudi, Salma and Perrot, Nathalie and Reuillon, Romain and Bourgine, Paul and Lutton, Evelyne},
title = {Optimal viable path search for a cheese ripening process using a multi-objective {EA}},
booktitle = {ICEC 2010, International Conference on Evolutionary Computation},
year = {2010},
month = oct,
note = {24-26 oct, Valencia, Spain},
abstract = {Viability theory is a very attractive theoretical approach for
the modeling of complex dynamical systems. However, its scope of
application is limited due to the high computational power it
necessitates. Evolutionary computation is a convenient way to address
some issues related to this theory. In this paper, we present a
multi-objective evolutionary approach to address the optimisation
problem related to the computation of optimal command profiles of a
complex process. The application we address here is a real size
problem from dairy industry, the modeling of a Camembert cheese
ripening process. We have developed a parallel implementation of a
multiobjective EA that has produced a Pareto front of optimal control
profiles (or trajectories), with respect to four objectives. The Pareto
front was then analysed by an expert who selected an interesting
compromise, yielding a new control profile that seems promising for
industrial applications.},
pdf = {Papers/MesmoudiICEC2010.pdf}
}

@inproceedings{Lutton-GECCO2011,
author = {Lutton, Evelyne and Fekete, Jean-Daniel},
title = {Visual Analytics of {EA} Data},
booktitle = {Genetic and Evolutionary Computation Conference, GECCO 2011},
year = {2011},
note = {July 12-16, 2011, Dublin, Ireland},
abstract = {An experimental analysis of evolutionary algorithms usually
generates a huge amount of multidimensional data, including numeric
and symbolic data. It is difficult to efficiently navigate in such a
set of data, for instance to be able to tune the parameters or
evaluate the efficiency of some operators. Usual features of
existing EA visualisation systems consist in visualising time- or
generation-dependent curves (fitness, diversity, or other
statistics). When dealing with genomic information, the task becomes
even more difficult, as a convenient visualisation strongly depends
on the considered fitness landscape. In this latter case the raw
data are usually sets of successive populations of points of a
complex multidimensional space. The purpose of this paper is to
evaluate the potential interest of a recent visual analytics tool
for navigating in complex sets of EA data, and to sketch future
developments of this tool, in order to better adapt it to the needs
of EA experimental analysis.},
pdf = {Papers/LuttonGECCO2011.pdf}
}

@inproceedings{Tonda-NICSO2011,
author = {Tonda, Alberto and Lutton, Evelyne and Squillero, Giovanni},
title = {Lamps: A Test Problem for Cooperative Coevolution},
booktitle = {NICSO 2011, the 5th International Workshop on
Nature Inspired Cooperative Strategies for Optimization, October 20-22, Cluj Napoca,
Romania},
year = {2011},
abstract = {We present an analysis of the behaviour of Cooperative Co-evolution
algorithms (CCEAs) on a simple test problem, that is the optimal placement of a set
of lamps in a square room, for various problems sizes. Cooperative Co-evolution
makes it possible to exploit more efficiently the artificial Darwinism scheme, as
soon as it is possible to turn the optimisation problem into a co-evolution of interdependent
sub-parts of the searched solution. We show here how two cooperative
strategies, Group Evolution (GE) and Parisian Evolution (PE) can be built for the
lamps problem. An experimental analysis then compares a classical evolution to GE
and PE, and analyses their behaviour with respect to scale.},
pdf = {Papers/TondaNICSO2011.pdf}
}

@inproceedings{Lutton-EA2011,
  author    = {Lutton, Evelyne and Foucquier, Julie and Perrot, Nathalie and Louchet, Jean and Fekete, Jean-Daniel},
  title     = {Visual Analysis of population scatterplots},
  booktitle = {10th Biannual International Conference on Artificial Evolution (EA-2011), Angers, France},
  year      = {2011},
  abstract  = {We investigate how visual analytic tools can deal with the huge amount of data produced during the run of an evolutionary algorithm. We show, on toy examples and on two real life problems, how a multidimensional data visualisation tool like ScatterDice/GraphDice can be easily used for analysing raw output data produced along the run of an evolutionary algorithm. Visual interpretation of population data is not used very often by the EA community for experimental analysis. We show here that this approach may yield additional high level information that is hardly accessible through conventional computation.},
  pdf       = {Papers/LuttonEA2011.pdf}
}

@inproceedings{EvoGraphDice-CEC2012,
author = {Cancino, Waldo and Boukhelifa, Nadia and Lutton, Evelyne},
title = {{EvoGraphDice}: Interactive Evolution for Visual Analytics},
booktitle = {IEEE Congress on Evolutionary Computation},
year = {2012},
note = {June 10-15, Brisbane, Australia},
abstract = {Visualization of large and complex datasets is a research challenge,
especially in frameworks like industrial design, decision making and
visual analytics. Interactive Evolution, used not only as an
optimisation tool, but also as an exploration tool may provide some
versatile solutions to this challenge. This paper presents an
attempt in this direction with the EvoGraphDice prototype, developed
on top of GraphDice, a general purpose visualization freeware for
multidimensional visualization based on scatterplot
matrices. EvoGraphDice interactively evolves compound additional
dimensions, that provide new viewpoints on a multidimensional
dataset. Compound dimensions are linear combination of the initial
data dimensions, they are initialised with a Principal Component
Analysis (PCA), and modified progressively by the interactive
evolution process.  Various interactions are available to the user,
either in a transparent way, via a capture of mouse-clicks, or
in a fully controlled manner, where the user has the opportunity to
modify or include his own compound dimension in the evolved
population, control the search space, or do some interactive
queries.  EvoGraphDice is tested on a synthetic dataset of dimension
6, where a known dependency is rediscovered via interactive
manipulation. A second example is presented, based on a real dataset
of dimension 13, provided by an industrial partner.  Our experiments
prove the potential of this interactive approach, and allow us to sketch
future directions of development for the EvoGraphDice prototype.},
pdf = {Papers/CSDL_CEC2012.pdf}
}

@inproceedings{Tonda-EuroGP2012,
author = {Tonda, Alberto Paolo and Lutton, Evelyne and Reuillon, Romain and Squillero, Giovanni and Wuillemin, Pierre-Henri},
title = {{Bayesian} Network Structure Learning from Limited Datasets through Graph Evolution},
booktitle = {15th European Conference on Genetic Programming},
year = {2012},
publisher = {Springer Verlag},
note = {11-13 April, Malaga, Spain},
abstract = {Bayesian networks are stochastic models, widely adopted to
encode knowledge in several fields. One of the most interesting
features of a Bayesian network is the possibility of learning its
structure from a set of data, and subsequently use the resulting
model to perform new predictions.  Structure learning for such models
is a NP-hard problem, for which the scientific community developed
two main approaches: score-and-search metaheuristics, often
evolutionary-based, and dependency-analysis deterministic algorithms,
based on stochastic tests. State-of-the-art solutions have been
presented in both domains, but all methodologies rely upon extensive
sets of learning data available, often numbering thousands of
samples. This is not the case for many real-world applications,
especially in the food processing and research industry.  This paper
proposes an evolutionary approach to the Bayesian structure learning
problem, specifically tailored for learning sets of limited size.
Falling in the category of score-and-search techniques, the
methodology exploits an evolutionary algorithm able to work directly
on graph structures, previously used for assembly language
generation, and a scoring function based on the Akaike Information
Criterion, a well-studied metric of stochastic model performance.
Experimental results show that the approach is able to outperform a
state-of-the-art dependency-analysis algorithm, providing better
models for small datasets.},
pdf = {Papers/tonda.pdf}
}

@inproceedings{bach-2012,
author = {Bach, Benjamin and Spritzer, Andr{\'e} and Lutton, Evelyne and Fekete, Jean-Daniel},
title = {Interactive Random Graph Generation with Evolutionary Algorithms},
booktitle = {Graph Drawing},
year = {2012},
month = sep,
series = {Lecture Notes in Computer Science},
publisher = {Springer},
abstract = {This article introduces an interactive system called Graph-Cuisine that lets
users steer an Evolutionary Algorithm (EA) to create random graphs
matching a set of user-specified measures. Generating random graphs
with particular characteristics is crucial for evaluating graph
algorithms, layouts and visualization techniques. Current random graph
generators provide limited control of the final characteristics of the
graphs they generate. The situation is even harder when one wants to
generate random graphs similar to a given one. This is due to the fact
that the similarity of graphs is often based on unknown parameters
leading to a long and painful iterative process including steps of
random graph generation, parameter changes, and visual inspection. Our
system is based on an approach of interactive evolutionary
computation. Fitting generator parameters to create graphs with
defined measures is an optimization problem, while judging the quality
of the resulting graphs often involves human subjective judgment. We
describe the graph generation process from a user's perspective,
provide details about our evolutionary algorithm and demonstrate how
Graph-Cuisine is employed to generate graphs that mimic a given real world
network.},
pdf = {Papers/GD-2012.pdf}
}

@inproceedings{Tonda2013,
author = {Tonda, Alberto and Lutton, Evelyne and Squillero, Giovanni and Wuillemin, Pierre-Henri},
title = {A Memetic Approach to {Bayesian} Network Structure Learning},
booktitle = {EvoComplex, Applications of Evolutionary Computation. EvoStar, The leading European Event on Bio-Inspired Computation.},
publisher = {Springer},
month = apr,
year = {2013},
pages = {102--111},
note = {3-5 April, Vienna, Austria},
abstract = {Bayesian networks are graphical statistical models that represent inference between data. For their effectiveness and versatility, they are widely adopted to represent knowledge in different domains. Several research lines address the NP-hard problem of Bayesian network structure learning starting from data: over the years, the machine learning community delivered effective heuristics, while different Evolutionary Algorithms have been devised to tackle this complex problem. This paper presents a Memetic Algorithm for Bayesian network structure learning, that combines the exploratory power of an Evolutionary Algorithm with the speed of local search. Experimental results show that the proposed approach is able to outperform state-of-the-art heuristics on two well-studied benchmarks.},
pdf = {Papers/evocomplex-2013-cr.pdf}
}

@inproceedings{EuroVis-2013,
author = {Boukhelifa, Nadia and Cancino, Waldo and Bezerianos, Anastasia and Lutton, Evelyne},
title = {Evolutionary Visual Exploration: Evaluation With Expert Users},
booktitle = {EuroVis 2013, 15th annual Visualization Symposium},
month = jun,
year = {2013},
note = {June 17-21, Leipzig, Germany},
abstract = {We present an Evolutionary Visual Exploration (EVE) system
that combines visual analytics with stochastic optimisation to aid
the exploration of multidimensional datasets characterised by a
large number of possible views or projections. Starting from
dimensions whose values are automatically calculated by a PCA, an
interactive evolutionary algorithm progressively builds (or evolves)
non-trivial viewpoints in the form of linear and non-linear
dimension combinations, to help users discover new interesting views
and relationships in their data. The criteria for evolving new
dimensions is not known a priori and are partially specified by the
user via an interactive interface: (i) The user selects views with
meaningful or interesting visual patterns and provides a
satisfaction score. (ii) The system calibrates a fitness function
(optimised by the evolutionary algorithm) to take into account the user
input, and then calculates new views. Our method leverages automatic
tools to detect interesting visual features and human interpretation
to derive meaning, validate the findings and guide the exploration
without having to grasp advanced statistical concepts. To validate
our method, we built a prototype tool (EvoGraphDice) as an extension
of an existing scatterplot matrix inspection tool, and conducted an
observational study with five domain experts. Our results show that
EvoGraphDice can help users quantify qualitative hypotheses and try
out different scenarios to dynamically transform their
data. Importantly, it allowed our experts to think laterally, better
formulate their research questions and build new hypotheses for
further investigation.},
pdf = {Papers/eurovis2013.pdf}
}

@inproceedings{VizGec-2013,
author = {Cancino, Waldo and Boukhelifa, Nadia and Bezerianos, Anastasia and Lutton, Evelyne},
title = {Evolutionary Visual Exploration: Experimental Analysis of Algorithm Behaviour},
booktitle = {VizGEC 2013, Workshop on Visualisation Methods in Genetic and Evolutionary Computation. Genetic and Evolutionary Computation Conference, GECCO 2013.},
month = jul,
year = {2013},
annote = {July 6-10, Amsterdam, The Netherlands},
abstract = {Recent publications in the domains of interactive evolutionary
computation and data visualisation consider an emerging
topic coined Evolutionary Visual Exploration
(EVE). EVE systems combine visual analytics
with stochastic optimisation to aid the exploration of complex,
multidimensional datasets.  In this work we present an experimental
analysis of the behaviour of an EVE system that is dedicated to the
visualisation of multidimensional datasets, which are generally characterised by a large
number of possible views or projections. EvoGraphDice is an
interactive evolutionary system that progressively evolves a small
set of new dimensions, to provide new viewpoints on the dataset, in
the form of linear and non-linear combinations of the original
dimensions. The criteria for evolving new dimensions are not known a
priori and are partially specified by the user via an interactive
interface: (i) The user selects views with meaningful or interesting
visual patterns and provides a satisfaction score. (ii) The system
calibrates a fitness function to take into account the user input,
and then calculates new views, with the help of an evolutionary
engine. In previous work (an observational study), we showed that EvoGraphDice
was able to facilitate ``exploration'' tasks, helping
users to discover new interesting views and relationships in their
data.  Here, we focus on the system's ``convergence''
behaviour, conducting an experiment with users who have a
precise task to perform, presented as a geometrical game,
and collected data show that EvoGraphDice
is able to ``learn'' user preferences in a way that helps
users fulfill their task (i.e. converge to desired solutions).},
pdf = {Papers/vizgec2013.pdf}
}

@inproceedings{MIBISOC2013-Respi,
author = {Vidal, Franck P. and Villard, Pierre-Frederic and Lutton, Evelyne},
title = {Automatic tuning of respiratory model for patient-based simulation},
booktitle = {MIBISOC2013, International Conference on Medical Imaging using Bio-Inspired and Soft Computing},
year = {2013},
month = may,
annote = {15--17 May, Brussels, Belgium},
abstract = {This paper is an overview of a method recently published
in a biomedical journal (IEEE Transactions on Biomedical
Engineering). The method is based on an optimisation technique
called "evolutionary strategy" and it has been designed to
estimate the parameters of a complex 15-D respiration model.
This model is adaptable to account for patient's specificities.
The aim of the optimisation algorithm is to finely tune the
model so that it accurately fits real patient datasets. The final
results can then be embedded, for example, in high fidelity
simulations of the human physiology. Our algorithm is fully
automatic and adaptive. A compound fitness function has been
designed to take into account for various quantities that have
to be minimised (here topological errors of the liver and the
diaphragm geometries). The performance of our implementation is
compared with two traditional methods (downhill simplex and
conjugate gradient descent), a random search and a basic real-valued
genetic algorithm. It shows that our evolutionary scheme
provides results that are significantly more stable and accurate
than the other tested methods. The approach is relatively generic
and can be easily adapted to other complex parametrisation
problems when ground truth data is available.},
pdf = {Papers/Vidal2013MIBISOC-Respiration.pdf}
}

@inproceedings{MIBISOC2013-PET,
author = {Vidal, Franck P. and Pavia, Yoann L. and Rocchisani, Jean-Marie and Louchet, Jean and Lutton, Evelyne},
title = {Artificial Evolution Strategy for {PET} Reconstruction},
booktitle = {MIBISOC2013, International Conference on Medical Imaging using Bio-Inspired and Soft Computing},
year = {2013},
month = may,
annote = {15--17 May, Brussels, Belgium},
abstract = {This paper shows new results of our artificial
evolution algorithm for positron emission tomography (PET)
reconstruction. This imaging technique produces datasets corresponding
to the concentration of positron emitters within the
patient. Fully three-dimensional (3D) tomographic reconstruction
requires high computing power and leads to many challenges.
Our aim is to produce high quality datasets in a time that is clinically
acceptable. Our method is based on a co-evolution strategy
called the "Fly algorithm". Each fly represents a point in space
and mimics a positron emitter. Each fly position is progressively
optimised using evolutionary computing to closely match the data
measured by the imaging system. The performance of each fly
is assessed based on its positive or negative contribution to the
performance of the whole population. The final population of
flies approximates the radioactivity concentration. This approach
has shown promising results on numerical phantom models.
The size of objects and their relative concentrations can be
calculated in two-dimensional (2D) space. In 3D, complex shapes
can be reconstructed. In this paper, we demonstrate the ability
of the algorithm to faithfully reconstruct more anatomically realistic
volumes.},
pdf = {Papers/Vidal2013MIBISOC-PET.pdf}
}

@inproceedings{DREAM2013-Visu,
author = {Lutton, Evelyne and Tonda, Alberto and Gaucel, Sebastien and Foucquier, Julie and Riaublanc, Alain and Perrot, Nathalie},
title = {Food model exploration through evolutionary optimization coupled with visualization: application to the prediction of a milk gel structure},
booktitle = {From Model Foods to Food Models. DREAM Project's International Conference},
year = {2013},
month = jun,
annote = {24--26 June 2013, Nantes, France},
pdf = {Papers/DREAM2013-Visu.pdf}
}

@inproceedings{DREAM2013-MilkGel,
author = {Descamps, Etienne and Perrot, Nathalie and Gaucel, Sebastien and Trelea, Cristian and Riaublanc, Alain and Mackie, Alan and Lutton, Evelyne},
title = {Coupling deterministic and random sequential approaches for structure and texture prediction of a dairy oil-in-water emulsion},
booktitle = {From Model Foods to Food Models. DREAM Project's International Conference},
year = {2013},
month = jun,
annote = {24--26 June 2013, Nantes, France},
pdf = {Papers/DREAM2013-MilkGel.pdf}
}

@inproceedings{Lutton-EvoPar2014,
author = {Lutton, Evelyne and Gilbert, Hugo and Cancino, Waldo and Bach, Benjamin and Parrend, Pierre and Collet, Pierre},
title = {{GridVis}: Visualisation of Island-Based Parallel Genetic Algorithms},
booktitle = {Evopar2014, EvoApplications track of EvoStar, The leading European event on Bio-Inspired Computation},
year = {2014},
month = apr,
series = {LNCS},
publisher = {Springer},
note = {Best paper award of Evopar2014, 23-25 April, Granada, Spain},
abstract = {Island Model parallel genetic algorithms rely on various migration
models and their associated parameter settings. A fine understanding
of how the islands interact and exchange information is an important
issue. We propose
GridVis, an interactive tool for visualising the exchange of individuals
and the propagation of fitness values between islands. We performed several
experiments on a grid and on a cluster to evaluate GridVis' ability
to visualise the activity of each machine and the communication flow
between machines. Experiments have been made on the optimisation of
a Weierstrass function using the EASEA language, with two schemes: a
scheme based on uniform islands and another based on specialised islands
(Exploitation, Exploration and Storage Islands).},
pdf = {Papers/Lutton-EvoPar2014.pdf}
}

@inproceedings{Gaucel-EuroGP2014,
author = {Gaucel, Sebastien and Keijzer, Maarten and Lutton, Evelyne and Tonda, Alberto},
title = {Learning Dynamical Systems Using Standard Symbolic Regression},
booktitle = {EuroGP track of EvoStar, The leading European event on Bio-Inspired Computation},
year = {2014},
series = {LNCS},
month = apr,
publisher = {Springer},
note = {Best paper award of EvoStar2014, 23-25 April, Granada, Spain},
abstract = {Symbolic regression has many successful applications in learning free-form regular equations from data. Trying to apply the same approach to differential equations is the logical next step: so far, however, results have not matched the quality obtained with regular equations, mainly due to additional constraints and dependencies between variables that make the problem extremely hard to tackle. In this paper we propose a new approach to dynamic systems learning. Symbolic regression is used to obtain a set of first-order Eulerian approximations of differential equations, and mathematical properties of the approximation are then exploited to reconstruct the original differential equations. Advantages of this technique include the de-coupling of systems of differential equations, that can now be learned independently; the possibility of exploiting established techniques for standard symbolic regression, after trivial operations on the original dataset; and the substantial reduction of computational effort, when compared to existing ad-hoc solutions for the same purpose. Experimental results show the efficacy of the proposed approach on an instance of the Lotka-Volterra model.},
pdf = {Papers/evostar-2014-differential-equations.pdf}
}

@inproceedings{Tonda-EA2013,
author = {Tonda, Alberto and Spritzer, Andre and Lutton, Evelyne},
title = {Balancing User Interaction and Control in Bayesian Network Structure Learning},
booktitle = {Artificial Evolution Conference},
year = {2013},
series = {LNCS},
volume = {8752},
month = oct,
publisher = {Springer},
note = {21-23 October 2013, Bordeaux, France},
abstract = {In this paper we present a study based on an evolutionary
framework to explore what would be a reasonable compromise between
interaction and automated optimisation in finding possible solutions for
a complex problem, namely the learning of Bayesian network structures,
an NP-hard problem where user knowledge can be crucial to distinguish
among solutions of equal fitness but very different physical meaning.
Even though several classes of complex problems can be effectively tackled
with Evolutionary Computation, most possess qualities that are
difficult to directly encode in the fitness function or in the individual's
genotype description. Expert knowledge can sometimes be used to integrate
the missing information, but new challenges arise when searching
for the best way to access it: full human interaction can lead to the
well-known problem of user-fatigue, while a completely automated evolutionary
process can miss important contributions by the expert. For
our study, we developed a GUI-based prototype application that lets
an expert user guide the evolution of a network by alternating between
fully-interactive and completely automatic steps. Preliminary user tests
were able to show that despite still requiring some improvements with
regards to its efficiency, the proposed approach indeed achieves its goal
of delivering satisfying results for an expert user.},
pdf = {Papers/Tonda-EA2013.pdf}
}

@inproceedings{Chabin-EA2016,
author = {Chabin, Thomas
and Tonda, Alberto
and Lutton, Evelyne},
editor = {Bonnevay, St{\'e}phane
and Legrand, Pierrick
and Monmarch{\'e}, Nicolas
and Lutton, Evelyne
and Schoenauer, Marc},
title = {How to Mislead an Evolutionary Algorithm Using Global Sensitivity Analysis},
booktitle = {Artificial Evolution: 12th International Conference, Evolution Artificielle, EA 2015, Lyon, France, October 26-28, 2015. Revised Selected Papers},
year = {2016},
publisher = {Springer International Publishing},
pages = {44--57},
doi = {10.1007/978-3-319-31471-6_4},
url = {http://dx.doi.org/10.1007/978-3-319-31471-6_4},
abstract = {The idea of exploiting Global Sensitivity Analysis (GSA) to make Evolutionary Algorithms more effective seems very attractive: intuitively, a probabilistic analysis can prove useful to a stochastic optimisation technique. GSA, that gathers information about the behaviour of functions receiving some inputs and delivering one or several outputs, is based on computationally-intensive stochastic sampling of a parameter space. Nevertheless, efficiently exploiting information gathered from GSA might not be so straightforward. In this paper, we present three mono- and multi-objective counterexamples to prove how naively combining GSA and EA may mislead an optimisation process.},
pdf = {Papers/Chabin2015.pdf}
}

@inproceedings{Chabin:2015:GSA:2739482.2764675,
author = {Chabin, Thomas and Tonda, Alberto and Lutton, Evelyne},
title = {Is Global Sensitivity Analysis Useful to Evolutionary Computation?},
booktitle = {Proceedings of the Companion Publication of the 2015 Annual Conference on Genetic and Evolutionary Computation},
series = {GECCO Companion '15},
year = {2015},
isbn = {978-1-4503-3488-4},
pages = {1365--1366},
numpages = {2},
url = {http://doi.acm.org/10.1145/2739482.2764675},
doi = {10.1145/2739482.2764675},
acmid = {2764675},
publisher = {ACM},
address = {New York, NY, USA},
keywords = {easea, evolutionary computation, global sensitivity analysis, real-valued optimization},
abstract = {Global Sensitivity Analysis (GSA) studies how uncertainty in the inputs of a system influences uncertainty in its outputs. GSA is extensively used by experts to gather information about the behavior of models, through computationally-intensive stochastic sampling of parameters' space. Some studies propose to make use of the considerable quantity of data acquired in this way to optimize the model parameters, often resorting to Evolutionary Algorithms (EAs). Nevertheless, efficiently exploiting information gathered from GSA might not be so straightforward. In this paper, we present a counterexample followed by experimental results to prove how naively combining GSA and EA can bring about negative outcomes.},
pdf = {Papers/evostar2015.pdf}
}

@inproceedings{perrot:hal-00840747,
title = {The complex system science for optimal strategy of management of a food system: the {Camembert} cheese ripening},
author = {Perrot, Nathalie and Mesmoudi, Salma and Reuillon, Romain and Lutton, Evelyne and Alvarez, Isabelle},
url = {https://hal.archives-ouvertes.fr/hal-00840747},
booktitle = {International Congress of Engineering and Food},
pages = {325--331},
year = {2011},
month = may,
keywords = {cheese ripening ; viability study ; optimal strategy of management ; computing grid ; multiobjective optimisation},
pdf = {https://hal.archives-ouvertes.fr/hal-00840747/file/Perrot2011.pdf},
abstract = {Significant advances are needed for food systems in terms of real-time prognosis capability developments, incorporating large scale modelling, distributed simulation and optimisation, and complete integration of the methods and algorithms. The goal is to be able to develop new paradigms at the frontier of life science and computing science for the management of systems like food systems. In parallel, just in the process of emerging and linked to these same questions is the science of complex systems, that proposes ways to understand systems located in turbulent, instable and changing environments. This paper points out and illustrates the interest to develop an approach adapting and coupling some fundamental tools of the complex system science. It combines viability and robustness analysis, multi-objective optimisation calculus and high computational performance using a computing grid. Adapted to the camembert cheese ripening, it has led to propose new strategies for control the process. One solution of the calculated pareto front, is compared to two trajectories tested during experiments led on a pilot, one standard and another optimized one. The total mass loss deviation for the calculated trajectory by comparison to the standard one is 0.04 kg in the same time and for identical microorganisms behaviour.}
}

@inproceedings{LuttonKeynoteFoodSim2016,
author = {Lutton, Evelyne and  Tonda, Alberto and Boukhelifa, Nadia and Perrot, Nathalie},
title = {Complex Systems in Food Science: Human Factor Issues},
booktitle = {FOODSIM'2016, April 3-7, 2016, Catholic University Leuven, Ghent, Belgium},
year = {2016},
note = {Keynote Invited Speech},
pdf = {Papers/lutton_foodsim_2016.pdf},
abstract = {Building in-silico decision making systems is essential in the food domain, albeit highly difficult. This task strongly relies on multidisciplinary research and in particular on advanced techniques from artificial intelligence. The success of such systems depends on how well they cope with the complex properties of food processes, such as the large variety of interacting components including those related to human expertise; and their dynamic, non-linear, multi-scale, uncertain and non-equilibrium behaviors. Robust stochastic optimization techniques, evolutionary computation and in particular Interactive Evolutionary Computation (IEC) seem to be a fruitful framework for developing food science models. A Human-Centered approach to Interactive Evolutionary Computation is discussed in this paper as a possible pertinent way to cope with challenges related to human factors in this context.}
}

@inproceedings{Boukhelifa-CHIWorkshop2016,
author = {Boukhelifa, Nadia and Bezerianos, Anastasia and  Tonda, Alberto and Lutton, Evelyne},
title = {Research Prospects in the Design and Evaluation of Interactive Evolutionary Systems for Art and Science},
booktitle = {ACM CHI Workshop on Human Centered Machine Learning, San Jose, CA, United States},
year = {2016},
url = {http://hcml2016.goldsmithsdigital.com},
pdf = {Papers/boukhelifa_hcml_2016.pdf},
abstract = {We report on our experience in designing and evaluating seven applications from seven different domains using an interactive evolutionary approach. We conducted extensive evaluations for some of these applications, both quantitative and qualitative, and collected rich feedback from our ongoing collaborations with end-user scientists and artists. To ground our discussion, we refer to two applications, from art and science, as exemplars of our work in order to identify strengths and weaknesses in our approach. We argue that human-centered design could play an important role in addressing some of the identified issues such as the “black box” and the “user-bottleneck” effects. We discuss research opportunities requiring human-computer interaction methodologies in order to support both the visible and hidden roles that humans play in interactive evolutionary computation and machine learning.}
}

@inproceedings{Boukhelifa-RV3-2015,
author = {Boukhelifa, Nadia and Bezerianos, Anastasia and Lutton, Evelyne},
title = {A Mixed Approach for the Evaluation of a Guided Exploratory Visualization System},
booktitle = {EuroVis Workshop on Reproducibility, Verification, and Validation in Visualization (EuroRV3), 25-26 May},
year = {2015},
pdf = {Papers/boukhelifa_rv3_2015.pdf},
abstract = {We summarise and reflect upon our experience in evaluating a guided exploratory visualization system. Our system guides users in their exploration of multidimensional datasets to pertinent views of their data, where the notion of pertinence is defined by automatic indicators, such as the amount of visual patterns in the view, and subjective user feedback obtained during their interaction with the tool. To evaluate this type of system, we argue for deploying a collection of validation methods that are: user-centered, observing the utility and effectiveness of the system for the end-user; and algorithm-centered, analysing the computational behaviour of the system. We report on observations and lessons learnt from working with expert users both for the design and the evaluation of our system.}
}

@inproceedings{floury:hal-01250499,
title = {{Digestion of milk protein gels in simulated gastric environment: exploration of the disintegration process and diffusion behavior of pepsin}},
author = {Floury, Juliane and Bianchi, Tiago and Thevenot, Jonathan and Dupont, Didier and Jamme, Frederic and Lutton, Evelyne and Panouille, Maud and Boue, Fran{\c c}ois and Le Feunteun, Steven},
url = {https://hal.archives-ouvertes.fr/hal-01250499},
booktitle = {{2nd Food Structure and Functionality Forum Symposium - from Molecules to Functionality}},
year = {2016},
month = feb,
abstract = {The gastric digestion comprises three phases: physical disintegration, chemical breakdown and nutrient release. Controlling food proteins gelation conditions leads to the formation of particles with specific structural features that change protein digestibility. The development of foods with specific proteolysis rates allows their fit to different ‘nutritional vulnerable groups’ (newborn,elderly, obese, athletes) needs. The hypothesis is that the overall proteolysis reaction rate is limited by the pepsin diffusion rate within the protein structures generated in the stomach.Three milk gels with the same protein concentration but different microstructures were prepared either by rennet, acid coagulation of non-fat milk, or heat treatment of whey proteins. The disintegration of the different gel networks was investigated under digestion in simulated gastric conditions, and the effect of the acidic environment uncoupled from the enzyme effect. The first effect was monitored during 30 minutes before addition of pepsin for two hours of digestion.Kinetics of the process was surveilled by particle size measurements and matter loss.Proteolysis was characterized by SDS-PAGE, and diffusion of fluorescently labelled (FITC) pepsin within the gels was followed using fluorescent recovery after photobleaching with confocal microscopy. In contrast to acid and whey protein gels, rennet gels underwent large microstructural modifications under acidic conditions, forming extremely compact protein aggregates that significantly slowed down pepsin diffusion rates through the modified gel network. Microscopic observations showed slower morphological evolution during the enzymatic digestion, whose rates depended on the gel considered. Moreover, pepsin was able to diffuse within the aggregates.Recent microscopic observations obtained by tryptophan fluorescence imaging on the SOLEIL synchrotron DISCO beamline suggest that the particles were enzyme digested inside out. 
In this study, we succeeded in interpreting the digestion phases as microstructural transformation, enzymatic reaction and diffusion phenomena in order to further dismantle the digestion process from a process engineering perspective.}
}

@inproceedings{Poster-EMC2016,
author = {Thevenot, J. and Floury, J. and Jamme, F. and Panouille, M. and Lutton, E. and Boue, F. and Dupont, D. and Le Feunteun, S.},
title = {Gastric digestion of milk protein gels as assessed by time-lapse Synchrotron UV-microscopy},
booktitle = {Presented at the 16th European Microscopy Congress EMC2016, 28 Aug - 2 Sept, Lyon, FRANCE},
year = {2016},
url = {http://prodinra.inra.fr/record/368976},
abstract = {Gastric digestion is the result of physical disintegration, acidic hydrolysis and enzymatic reactions leading to the release of nutrients which are absorbed in the upper intestinal tract. Protein is one of the essential macro-nutrients and can be eaten in a great variety of forms (solubilized, cross-linked, in their native or denatured states). Controlling food protein gelation conditions results in the formation of particles with specific structural features. Several in vivo and in vitro studies have shown an influence of the macro- and microstructure on the kinetics of milk protein hydrolysis. Nevertheless, the mechanisms by which the structure of dairy gels can affect the digestion kinetics remain largely unknown. The aim of the study was to assess the part played by HCl and gastric enzyme (i.e. pepsin) during gastric digestion using a dynamic and label-free imaging technique on the DISCO beamline of Synchrotron SOLEIL to visualize in situ the milk protein gels breakdown kinetics. The DISCO beamline uses the deep ultraviolet range to probe the intrinsic UV tryptophan fluorescence without the need of specific external probes. Two milk gels with the same protein concentration but different microstructures were prepared either by rennet or acid coagulation of non-fat milk. The disintegration of the different networks was monitored under digestion at body temperature in simulated gastric fluids and the effect of the acidic environment uncoupled from the enzyme effect. The evolution of particle area and mean fluorescence intensity has been determined, and used to estimate the kinetics of food particles breakdown. The kinetics of acid gel in vitro digestion was significantly reduced compared to rennet gel. Our data indicate that rennet gel has a two-step behavior during the acidification phase with a swelling followed by a contraction of the particle, not observed for acid gel. 
In addition, these microstructural modifications of rennet gel affect negatively the enzymatic breakdown kinetics of particles compared to acid gel. This study leads to original methodological developments both from the point of view of the acquisition of data and their joint analysis. Getting in situ information about digestion kinetics, microstructural transformation and enzymatic reaction, allow further analysis of the digestion process.}
}

@inproceedings{Lutton-Perrot-DOF2016,
author = {Lutton, Evelyne and Perrot, Nathalie},
title = {Complex systems in food science: Human factor issues},
booktitle = {DOF 2015. 6th International Symposium on Delivery of Functionality in Complex Food Systems Physically-Inspired Approaches from the Nanoscale to the Microscale.  July 14 to 17, Maison de la Chimie - Paris, France},
year = {2015},
note = {Keynote Speech},
abstract = {Complex systems approaches are an attractive way to model food systems, as they yield powerful tools to address challenging issues like multi-scale and big data issues. The specifics of the food domain however raise the focus on another crucial issue that is what can be called the human factor. At every stage, actually, human expertise and decision making have a major importance for a better understanding of food systems. Dealing with this is not a simple and solved problem. This talk illustrates some prospective research in this direction.}
}

@inproceedings{Descamps-DOF2016,