chomsky.py

# Chomsky random text generator, version 1.1, Raymond Hettinger, 2005/09/13
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/440546
"""
CHOMSKY is an aid to writing linguistic papers in the style
of the great master. It is based on selected phrases taken
from actual books and articles written by Noam Chomsky.
Upon request, it assembles the phrases in the elegant
stylistic patterns that Chomsky is noted for.
To generate n sentences of linguistic wisdom, type
    (CHOMSKY n) -- for example
    (CHOMSKY 5) generates half a screen of linguistic truth.
"""
leadins = """To characterize a linguistic level L,
On the other hand,
This suggests that
It appears that
Furthermore,
We will bring evidence in favor of the following thesis:
To provide a constituent structure for T(Z,K),
From C1, it follows that
For any transformation which is sufficiently diversified in \
application to be of any interest,
Analogously,
Clearly,
Note that
Of course,
Suppose, for instance, that
Thus
With this clarification,
Conversely,
We have already seen that
By combining adjunctions and certain deformations,
I suggested that these results would follow from the assumption that
If the position of the trace in (99c) were only relatively \
inaccessible to movement,
However, this assumption is not correct, since
Comparing these examples with their parasitic gap counterparts in \
(96) and (97), we see that
In the discussion of resumptive pronouns following (81),
So far,
Nevertheless,
For one thing,
Summarizing, then, we assume that
A consequence of the approach just outlined is that
Presumably,
On our assumptions,
It may be, then, that
It must be emphasized, once again, that
Let us continue to suppose that
Notice, incidentally, that """
# List of LEADINs to buy time.
subjects = """ the notion of level of grammaticalness
a case of semigrammaticalness of a different sort
most of the methodological work in modern linguistics
a subset of English sentences interesting on quite independent grounds
the natural general principle that will subsume this case
an important property of these three types of EC
any associated supporting element
the appearance of parasitic gaps in domains relatively inaccessible \
to ordinary extraction
the speaker-hearer's linguistic intuition
the descriptive power of the base component
the earlier discussion of deviance
this analysis of a formative as a pair of sets of features
this selectionally introduced contextual feature
a descriptively adequate grammar
the fundamental error of regarding functional notions as categorial
relational information
the systematic use of complex symbols
the theory of syntactic features developed earlier"""
# List of SUBJECTs chosen for maximum professorial macho.
verbs = """can be defined in such a way as to impose
delimits
suffices to account for
cannot be arbitrary in
is not subject to
does not readily tolerate
raises serious doubts about
is not quite equivalent to
does not affect the structure of
may remedy and, at the same time, eliminate
is not to be considered in determining
is to be regarded as
is unspecified with respect to
is, apparently, determined by
is necessary to impose an interpretation on
appears to correlate rather closely with
is rather different from"""
# List of VERBs chosen for autorecursive obfuscation.
objects = """ problems of phonemic and morphological analysis.
a corpus of utterance tokens upon which conformity has been defined \
by the paired utterance test.
the traditional practice of grammarians.
the levels of acceptability from fairly high (e.g. (99a)) to virtual \
gibberish (e.g. (98d)).
a stipulation to place the constructions into these various categories.
a descriptive fact.
a parasitic gap construction.
the extended c-command discussed in connection with (34).
the ultimate standard that determines the accuracy of any proposed grammar.
the system of base rules exclusive of the lexicon.
irrelevant intervening contexts in selectional rules.
nondistinctness in the sense of distinctive feature theory.
a general convention regarding the forms of the grammar.
an abstract underlying order.
an important distinction in language use.
the requirement that branching is not tolerated within the dominance \
scope of a complex symbol.
the strong generative capacity of the theory."""
# List of OBJECTs selected for profound sententiousness.
import textwrap, random
from itertools import chain, islice


def generate_chomsky(times=5, line_length=72):
    parts = []
    for part in (leadins, subjects, verbs, objects):
        # Split each block into one phrase per line and shuffle independently.
        phraselist = list(map(str.strip, part.splitlines()))
        random.shuffle(phraselist)
        parts.append(phraselist)
    # zip(*parts) pairs one leadin, subject, verb, and object per sentence;
    # islice keeps the first `times` tuples, and chain flattens them into a
    # single stream of phrases.
    output = chain(*islice(zip(*parts), 0, times))
    print(textwrap.fill(" ".join(output), line_length))


if __name__ == "__main__":
    generate_chomsky()
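
# Minimal usage sketch (assumes this file is importable as ``chomsky``, a
# hypothetical module name). Output differs on every run because each phrase
# list is shuffled before the slots are paired:
#
#     from chomsky import generate_chomsky
#     generate_chomsky(times=3, line_length=60)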