diff --git a/build.py b/build.py index b281b08..ed09915 100644 --- a/build.py +++ b/build.py @@ -48,14 +48,13 @@ class Website: filename = page.get_filename() html_content = page.get_content() html_output = template.render( - title=metadata['title'], - year=metadata['year'], - date=metadata['date'], - tags=metadata.get('tags', []), - image=metadata['image'], - showcase=metadata['showcase'], - credits=metadata['credits'], - references=page.get_references(), + title=metadata.get('title', ''), + year=metadata.get('year', ''), # Assuming year is a string + date=metadata.get('date', ''), # Assuming date is a string + tags=metadata.get('tags', []), + image=metadata.get('image', ''), # Assuming image is a string + showcase=metadata.get('showcase', []), + credits=metadata.get('credits', []), # Assuming credits is a string content=html_content) with open(os.path.join(self.output_dir_root, filename), 'w', encoding='utf8') as output_file: output_file.write(html_output) @@ -105,6 +104,7 @@ class Website: def format_content(self, content): # convert all (link)(src) to tags content = re.sub(r'\(([^)]+)\)\[([^\]]+)\]', r'\1', content) + # convert all video links to embedded code return content def fetch_all_images(self): diff --git a/content/about.md b/content/about.md index 11bb822..2eb27e0 100644 --- a/content/about.md +++ b/content/about.md @@ -1,5 +1,4 @@ --- -cv: some-path instagram: https://www.instagram.com/cailean.finn/ git: https://git.fioruil.ie/ --- diff --git a/content/posts/ai-x-body.md b/content/posts/ai-x-body.md index 962587c..38837ff 100644 --- a/content/posts/ai-x-body.md +++ b/content/posts/ai-x-body.md @@ -5,19 +5,6 @@ year: 2022 image: aixbody.webp tags: [HPE, AI] date: 2024-06-07 -showcase: - - name: Exposed Torino Foto Festival - year: 2024 - location: Turin - - name: Another Showcase Festival - year: 2025 - location: Rome -credits: - Cailean: CT - Leon: Camera -references: - - title: some-title - link: https://www.caileanfinn.ie draft: false --- This 
publication was created in collaboration with AIxDesign, as part of their AI Playground (S01) which ran from May 2022-February 2023. diff --git a/content/posts/demo.md b/content/posts/demo.md new file mode 100644 index 0000000..8b593d0 --- /dev/null +++ b/content/posts/demo.md @@ -0,0 +1,37 @@ +--- +title: DEMO +type: Project +year: 2024 +image: demo.jpg +tags: [AI, NLP, Simulation] +date: 2024-06-07 +showcase: + - name: Speak It Now Eat It, Revision Performing Arts Festival + year: 2023 + location: Belfast +credits: + Eoin O'Sullivan: Sound design +references: + - title: some-title + link: https://www.caileanfinn.ie +draft: false +--- +(O)MACHINE is a real-time generative performance that employs contemporary machine learning algorithms to explore how we humanise technologies.The architecture of this system was designed to emulate our stream of consciousness, where the machine is trapped in this perpetual cycle through processes of reflection and feedback. As questions begin to arise around the sentience or ‘intelligence’ of these thinking machines, it has become even more important to explore our relationship with machines, and how it continues to evolve. By engaging with its output, it positions artificial intelligence as both a subject and tool. Through this approach, we may begin to expand the dynamics of this connection through new methods of collaboration. From this interaction, we can continue to learn more about how these systems function, how they think, if they even think at all, or can it help us think? + +Sound design by (Eoin O'Sullivan)[https://eoin-osullivan.bandcamp.com/] + +* One +* Two +* Three + +__HELLO__ + +## WORLD ## + +
+ +
+ +![alt text](/images/demo.jpg "Title") + +_WHAT_ \ No newline at end of file diff --git a/public/about.html b/public/about.html index 9c23b2e..428acaa 100644 --- a/public/about.html +++ b/public/about.html @@ -33,8 +33,6 @@
caileannn@gmail.com
-
cv
-
instagram
git
diff --git a/public/articles/(o)machine.html b/public/articles/(o)machine.html index 6e2c00b..ea7901b 100644 --- a/public/articles/(o)machine.html +++ b/public/articles/(o)machine.html @@ -40,17 +40,42 @@

(O)MACHINE is a real-time generative performance that employs contemporary machine learning algorithms to explore how we humanise technologies.The architecture of this system was designed to emulate our stream of consciousness, where the machine is trapped in this perpetual cycle through processes of reflection and feedback. As questions begin to arise around the sentience or ‘intelligence’ of these thinking machines, it has become even more important to explore our relationship with machines, and how it continues to evolve. By engaging with its output, it positions artificial intelligence as both a subject and tool. Through this approach, we may begin to expand the dynamics of this connection through new methods of collaboration. From this interaction, we can continue to learn more about how these systems function, how they think, if they even think at all, or can it help us think?

Sound design by Eoin O'Sullivan

- diff --git a/public/articles/ai-x-body.html b/public/articles/ai-x-body.html index 7b010aa..cf08c45 100644 --- a/public/articles/ai-x-body.html +++ b/public/articles/ai-x-body.html @@ -39,17 +39,16 @@

The text explores the evolution of human pose estimation and recognition technologies through tracing their historical development, their contemporary applications, and how artists and creative practitioners have employed such tools in their artistic process.

Article 📎

- diff --git a/public/articles/beauty-and-the-beep.html b/public/articles/beauty-and-the-beep.html index 1715727..8f472df 100644 --- a/public/articles/beauty-and-the-beep.html +++ b/public/articles/beauty-and-the-beep.html @@ -43,17 +43,54 @@

Wondering who would buy an automated mechanical pet to assist and live in their home, the film explores Boston Dynamics' datafied definition of a home or what it takes for such a personal and intimate space to be standardised for computer vision to function. Bertil — a synthetic chair inspired by IKEA’s first 3D rendered image for their print catalogue, which marked their shift to rendered imagery — wanders through this seemingly simple virtual home, interacting with its objects, in search of some answers. Navigating the home for Bertil is no easy task, as they encounter the daily life noise that is littered throughout the home. A banana trips them, they cannot sit, they get stuck on a treadmill and why is there a toy pony on the floor? Revealing how the impossibility of gathering training data in the home has led to the widespread use of synthetic data, Bertil reminds us that the home is private and not for capture.

For this work, I collaborated with Simone C Niquille as a Creative Technologist. In the process of creating Beauty and The Beep, the chair was trained using reinforcement learning algorithms in the Unity game engine. The training process took inspiration from Boston Dynamics' approach in the training of their SpotMini, as well as traditional DeepMimic environments for Reinforcement Learning research. We chose to use Unity for this project, as it allowed us to work with the ML-Agents Package - an experimental Reinforcement Learning framework, which wraps complex reinforcement learning algorithms/methods into components which are more accessible for developers. Even though this package has been forgotten by Unity, for the most part, working with a user-friendly game engine was key in creating simulated environments for the 🪑 to explore.

- diff --git a/public/articles/data.html b/public/articles/data.html index 95b3a9f..c94ddb2 100644 --- a/public/articles/data.html +++ b/public/articles/data.html @@ -41,17 +41,54 @@

The website presents the interview in both linear and non-linear formats. By utilising machine learning and natural language processing, text segments extracted from the interview were ranked against key topics; creating a higher-dimensional understanding, and projection of the interview - which is commonly referred to as the latent space. After, a t-SNE algorithm was applied to high-dimensional space, flattening it into two dimensions, represented in the interactive map; allowing the user to navigate the interview from the perspective of the machine.

Designed and developed using p5js, by Cailean Finn.

- diff --git a/public/articles/demo.html b/public/articles/demo.html new file mode 100644 index 0000000..fa1ecd6 --- /dev/null +++ b/public/articles/demo.html @@ -0,0 +1,98 @@ + + + + + + cailean.finn + + + + + + + +
+ + +
+
+
DEMO, 2024
+
+ +
✳ AI
+ +
✳ NLP
+ +
✳ Simulation
+ +
+
+ +
+
+

(O)MACHINE is a real-time generative performance that employs contemporary machine learning algorithms to explore how we humanise technologies. The architecture of this system was designed to emulate our stream of consciousness, where the machine is trapped in this perpetual cycle through processes of reflection and feedback. As questions begin to arise around the sentience or ‘intelligence’ of these thinking machines, it has become even more important to explore our relationship with machines, and how it continues to evolve. By engaging with its output, it positions artificial intelligence as both a subject and tool. Through this approach, we may begin to expand the dynamics of this connection through new methods of collaboration. From this interaction, we can continue to learn more about how these systems function, how they think, if they even think at all, or can it help us think?

+

Sound design by Eoin O'Sullivan

+
    +
  • One
  • +
  • Two
  • +
  • Three
  • +
+

HELLO

+

WORLD

+
+ + +
+ +

alt text

+

WHAT

+
+ + +
+
Exhibited:
+
+ +
+

(2023)

+

Speak It Now Eat It, Revision Performing Arts Festival,

+

Belfast

+
+ +
+
+ + + +
+
Credits:
+
+ +
+

+

Eoin O'Sullivan,

+

Sound design

+
+ +
+
+ + + + +
+ +
+
+
+ +
+ + \ No newline at end of file diff --git a/public/articles/dwelling.html b/public/articles/dwelling.html index 378f6d9..be7e3fb 100644 --- a/public/articles/dwelling.html +++ b/public/articles/dwelling.html @@ -40,17 +40,48 @@

Dwelling is a dynamic live performance and theatre installation created by Peter Power and Leon Butler. The performance explores the periphery of cultural isolation, and the dispersal of self across the multimedial, delving into themes of digital mortality, transformation, and rebirth. The performance takes place in the fragments of a home with dance performances by Robyn Byrne and Rosie Stebbing. The characters moves between the digital and real space through motion capture data in conjunction with live tracking. Over the duration of the performance, Rosie starts to form a connection between her physical self, and the digital divide.

The virtual world was created entirely within Unity. Data was captured from Robyn's movement through various methods, such as the Perception Neuron mo-cap suit, as well as emerging monocular 3d human pose detection models. Unity's particle system was used extensively in the project, converting point cloud and positional data into emergent movement, and ethereal landscapes.

- diff --git a/public/articles/electronic-image.html b/public/articles/electronic-image.html index 17e92e9..51a69bd 100644 --- a/public/articles/electronic-image.html +++ b/public/articles/electronic-image.html @@ -43,17 +43,48 @@

The three studies have been shaped by the experimental processes, techniques, and philosophies of the pioneering artists working with video. The artists in question, specifically the works of Steina and Woody Vasulka, who were driven by their yearning to understand the electronic signal and to formulate an electronic lexicon. The work, in its entirety, is an investigation of the unique set of “codes” embedded within the language of the video signal, consequently, recognising the electronic image as an object of time, energy, and it's programmable building element – the waveform.

📎thesis.pdf

- diff --git a/public/articles/latent-mirror.html b/public/articles/latent-mirror.html index f1009d7..bf53311 100644 --- a/public/articles/latent-mirror.html +++ b/public/articles/latent-mirror.html @@ -43,17 +43,48 @@

The visual element of the performance was real-time and audio reactive, which captured the facial structure of the performing sound artist. By utilising Machine Learning Models, the captured face was manipulated and distorted further to animate another portrait, in an attempt to deconstruct and isolate key compositional elements of the 'subject'. Through this work, we hoped to reflect on our digital identity, and highlight the disconnection between our physical and virtual presence.

Created in TouchDesigner.

- diff --git a/public/articles/undefined.html b/public/articles/undefined.html index c449e59..16ce912 100644 --- a/public/articles/undefined.html +++ b/public/articles/undefined.html @@ -40,17 +40,48 @@

The online version of Undefined Panorama allows people to move between micro and macro perspectives of global, national and local events. In moving between these scales, Yang Ah Ham aims to open up questions about our relations to these events, and to generate new meanings by altering the scale of observation.

This website was commissioned by 2022 Seo-Seoul Museum of Art Pre-opening Public Program Exceptional Times, Uncertain Moves, and created with support from the Arts Council Korea.

- diff --git a/public/css/styles.css b/public/css/styles.css index 88e2456..21af17f 100644 --- a/public/css/styles.css +++ b/public/css/styles.css @@ -324,6 +324,10 @@ body { cursor: pointer; } + #project-body p { + font-size: 20px; + } + #project-body a{ color: rgb(73, 146, 248); text-decoration: none; @@ -332,6 +336,11 @@ body { cursor: pointer; } + p img { + width: 100%; + height: auto; + } + #project-cover img{ /* border: 2px solid rgb(255 149 149); */ width: 100%; @@ -339,8 +348,7 @@ body { } #pr-header { - font-family: 'Redacted Reg'; - letter-spacing: 2px; + font-family: 'Gothic A1', sans-serif; font-size: 25px; color: rgb(245, 102, 102); height: fit-content; @@ -361,7 +369,7 @@ body { color: rgb(73, 146, 248); text-decoration: none; font-family: 'IBM Plex Mono', monospace; - font-size: 17.5px; + font-size: 20px; padding-left: 2.5px; padding-right: 2.5px; font-style: italic; @@ -430,6 +438,44 @@ body { height: auto; } + #credit-cont { + display: flex; + flex-direction: column; + gap:5px; + } + + #showcase-cont { + display: flex; + flex-direction: column; + gap:5px; + } + + .showcase { + display: flex; + flex-direction: row; + gap: 5px; + height: fit-content; + flex-wrap: wrap; + } + + .showcase p { + margin: 0; + padding: 0; + } + + .credit { + display: flex; + flex-direction: row; + gap: 5px; + height: fit-content; + flex-wrap: wrap; + } + + .credit p { + padding: 0; + margin: 0; + } + @@ -533,8 +579,6 @@ body { } #pr-header { - font-family: 'Redacted Reg'; - letter-spacing: 2px; font-size: 20px; margin-bottom: 25px; } @@ -542,6 +586,11 @@ body { #pr-list a { font-size: 15px; } + + #project-body p { + font-size: 15px; + } + } @media only screen and (max-width: 480px) { @@ -560,4 +609,6 @@ body { .gallery-image{ max-width: 90%; } + + } diff --git a/public/gallery.html b/public/gallery.html index ed5e926..f5c1d1b 100644 --- a/public/gallery.html +++ b/public/gallery.html @@ -31,6 +31,8 @@ + + diff --git a/public/images/demo.jpg b/public/images/demo.jpg new 
file mode 100644 index 0000000..b724490 Binary files /dev/null and b/public/images/demo.jpg differ diff --git a/public/js/main.js b/public/js/main.js index af1e078..0b7ffb0 100644 --- a/public/js/main.js +++ b/public/js/main.js @@ -13,6 +13,7 @@ class PickHelper { this.pickedObject = null; this.lastObjectPicked = null; this.sameObjectPicked = false; + this.initalPick = false; } pick(normalizedPosition, scene, camera, time) { @@ -137,10 +138,14 @@ const object_list = [] const object_count = 20; const fontLoader = new FontLoader(); let group = new THREE.Group(); +let pastArticle; function init() { + + // Set to a value outside the screen range + pickPosition.x = -100000; + pickPosition.y = -100000; - // Texture Loader const loader = new THREE.TextureLoader(); texture = loader.load('/images/website/checker.png'); @@ -308,7 +313,8 @@ window.addEventListener('resize', () => { const pickedArticle = object_list.find(article => article.mesh === pickHelper.pickedObject); if (pickedArticle) { document.body.style.cursor = 'pointer'; - if (!isHovering) + if (pickedArticle.name != pastArticle) + console.log('updated!') UpdateText(pickedArticle.name); }else{ document.body.style.cursor = 'default'; @@ -319,7 +325,7 @@ window.addEventListener('resize', () => { function UpdateText(text) { MeasureText(text); - isHovering = true + pastArticle = text } function ClearTextGeoList() { @@ -427,35 +433,5 @@ function createTextGeometry(text, size) { window.addEventListener('mouseleave', clearPickPosition); window.addEventListener('click', objectClicked) - // Add touch event listeners - window.addEventListener('touchstart', onTouchStart, {passive: false}); - window.addEventListener('touchmove', onTouchMove, {passive: false}); - window.addEventListener('touchend', onTouchEnd, {passive: false}); - window.addEventListener('touchcancel', clearPickPosition); - - let touchStartTime; - const touchHoldDuration = 500; // Duration in milliseconds to distinguish between tap and hold - - function 
onTouchStart(event) { - touchStartTime = Date.now(); - setPickPosition(event.touches[0]); - } - - function onTouchMove(event) { - setPickPosition(event.touches[0]); - } - - function onTouchEnd(event) { - const touchDuration = Date.now() - touchStartTime; - clearPickPosition(); - if (touchDuration < touchHoldDuration) { - // It's a tap - objectClicked(event); - } else { - // It's a hold - // Do nothing extra, as hover effect should already be handled by setPickPosition - } - } - // Initialize the application init(); diff --git a/public/json/articles.json b/public/json/articles.json index 32fa860..c4a1f24 100644 --- a/public/json/articles.json +++ b/public/json/articles.json @@ -19,6 +19,11 @@ "filename": "/articles/data.html", "image": "data.png" }, + { + "name": "DEMO", + "filename": "/articles/demo.html", + "image": "demo.jpg" + }, { "name": "Dwelling", "filename": "/articles/dwelling.html", diff --git a/public/list.html b/public/list.html index 9a14955..404238c 100644 --- a/public/list.html +++ b/public/list.html @@ -144,6 +144,25 @@

The BIG D.A.T.A Interview

+
+ +
+ +
+ +
+ +
✳ AI
+ +
✳ NLP
+ +
✳ Simulation
+ +
+ +

DEMO

+
+
diff --git a/templates/article.html b/templates/article.html index 769747d..fadb650 100644 --- a/templates/article.html +++ b/templates/article.html @@ -15,6 +15,38 @@
{{ content }}
+ + {% if showcase %} +
+
Exhibited:
+
+ {% for show in showcase %} +
+

({{ show['year'] }})

+

{{ show['name'] }},

+

{{ show['location'] }}

+
+ {% endfor %} +
+
+ {% endif %} + + {% if credits %} +
+
Credits:
+
+ {% for credit in credits %} +
+

+

{{ credit }},

+

{{ credits[credit]}}

+
+ {% endfor %} +
+
+ {% endif %} + + {% if references %}
+ {% endif %} +
-
{% endblock %}