diff --git a/allzweckmesser/scanner.py b/allzweckmesser/scanner.py
index 8182cc8a476dbb543e3325242822b58ee3d547ed..803e9926e94d580650e91e9f83ced8262a7c6b50 100644
--- a/allzweckmesser/scanner.py
+++ b/allzweckmesser/scanner.py
@@ -401,6 +401,65 @@ def join_u_diphthong_syllables(token, syllables):
     return syllables
 
 
def generate_synizesis(reading):
    """Generate all reading variants with synizesis applied.

    Scans each token for adjacent syllable pairs whose contact is a
    vowel-(h)-vowel sequence (a synizesis candidate), then produces one
    reading copy per on/off combination of the detected sites — 2**n
    readings for n sites.  In a reading where a site is "on", the two
    syllables are merged into a single long syllable carrying a
    'synizesis' Phenomenon.

    :param reading: the base Reading whose tokens/syllables are scanned.
    :return: list of Reading copies, one per site combination.
    """
    # Each site: [first_syl_id, second_syl_id, merged_text, matched_chars]
    syn_list = []
    for token in reading.tokens:
        for i, syl in enumerate(token.syllables[:-1]):
            next_syl = token.syllables[i + 1]
            merged_text = syl.text + next_syl.text
            match = re.search(r'[aeiouvy]h?[aeiouvy]', merged_text)
            if match is None:
                continue
            # The vowel contact must straddle the syllable boundary.
            if match.start() < len(syl.text) < match.end():
                # BUGFIX: match.group(0) is the matched substring;
                # match.groups(0) was always () (the pattern has no
                # capture groups), so Phenomenon.chars was empty.
                syn_list.append(
                    [syl.id, next_syl.id, merged_text, match.group(0)])

    # BUGFIX: n sites yield 2**n on/off combinations (exactly what
    # product([0, 1], repeat=n) enumerates), not n*2.
    combinations = list(product([0, 1], repeat=len(syn_list)))
    reading_copies = multiply_readings([reading], len(combinations))

    for combi, reading_copy in zip(combinations, reading_copies):
        # Sites switched on in this combination, keyed by the first
        # syllable's ORIGINAL id.
        syn_dict = {site[0]: site
                    for j, site in enumerate(syn_list) if combi[j]}

        for token in reading_copy.tokens:
            new_syllables = []
            skip_next = False
            # BUGFIX: snapshot ids before mutating.  The old code
            # decremented ids mid-iteration and then looked later sites
            # up by the (now shifted) live id, so a second site in the
            # same token was silently missed.
            original_ids = [s.id for s in token.syllables]
            for j, syllable in enumerate(token.syllables[:-1]):
                if skip_next:
                    # This syllable was swallowed by the previous merge.
                    skip_next = False
                    continue
                if original_ids[j] in syn_dict:
                    site = syn_dict[original_ids[j]]
                    skip_next = True
                    syllable.text = site[2]
                    syllable.span = [syllable.span[0],
                                     token.syllables[j + 1].span[1]]
                    syllable.syllable_length = 2
                    syllable.vowel_length = 2
                    syllable.phenomena['synizesis'] = Phenomenon(chars=site[3])
                    # Renumber everything after the swallowed syllable.
                    for s in token.syllables[j + 2:]:
                        s.id -= 1
                new_syllables.append(syllable)
            # The final syllable survives unless it was just swallowed.
            if not skip_next and token.syllables:
                new_syllables.append(token.syllables[-1])
            token.syllables = new_syllables

    return reading_copies
+
+
 def get_syllables_for_token(token: Token):
     syllables = []
     if token.text.lower() in SPECIAL_CASES:
@@ -424,7 +483,8 @@ def get_syllables(reading):
         for s in token.syllables:
             s.id = i
             i += 1
-    return reading
+    more_readings = generate_synizesis(reading)
+    return more_readings
 
 
 def muta_cum_liquida(verse):
@@ -575,8 +635,10 @@ class Scanner:
             for v, br in zip(plain_verses, base_readings)
         ]
         for verse in verses:
+            new_readings = list()
             for reading in verse.readings:
-                get_syllables(reading)
+                new_readings.extend(get_syllables(reading))
+            verse.readings = new_readings
             parse_verse(verse)
             make_elisions(verse)
         return verses