HADOOP-11731. Rework the changelog and releasenotes (aw)
This commit is contained in:
parent
4d14816c26
commit
f383fd9b6c
@ -73,7 +73,7 @@ Where to run Maven from?
|
|||||||
----------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------
|
||||||
Maven build goals:
|
Maven build goals:
|
||||||
|
|
||||||
* Clean : mvn clean
|
* Clean : mvn clean [-Preleasedocs]
|
||||||
* Compile : mvn compile [-Pnative]
|
* Compile : mvn compile [-Pnative]
|
||||||
* Run tests : mvn test [-Pnative]
|
* Run tests : mvn test [-Pnative]
|
||||||
* Create JAR : mvn package
|
* Create JAR : mvn package
|
||||||
@ -84,7 +84,7 @@ Maven build goals:
|
|||||||
* Run clover : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
|
* Run clover : mvn test -Pclover [-DcloverLicenseLocation=${user.name}/.clover.license]
|
||||||
* Run Rat : mvn apache-rat:check
|
* Run Rat : mvn apache-rat:check
|
||||||
* Build javadocs : mvn javadoc:javadoc
|
* Build javadocs : mvn javadoc:javadoc
|
||||||
* Build distribution : mvn package [-Pdist][-Pdocs][-Psrc][-Pnative][-Dtar]
|
* Build distribution : mvn package [-Pdist][-Pdocs][-Psrc][-Pnative][-Dtar][-Preleasedocs]
|
||||||
* Change Hadoop version : mvn versions:set -DnewVersion=NEWVERSION
|
* Change Hadoop version : mvn versions:set -DnewVersion=NEWVERSION
|
||||||
|
|
||||||
Build options:
|
Build options:
|
||||||
@ -93,6 +93,7 @@ Maven build goals:
|
|||||||
* Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
|
* Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
|
||||||
* Use -Psrc to create a project source TAR.GZ
|
* Use -Psrc to create a project source TAR.GZ
|
||||||
* Use -Dtar to create a TAR with the distribution (using -Pdist)
|
* Use -Dtar to create a TAR with the distribution (using -Pdist)
|
||||||
|
* Use -Preleasedocs to include the changelog and release docs (requires Internet connectivity)
|
||||||
|
|
||||||
Snappy build options:
|
Snappy build options:
|
||||||
|
|
||||||
@ -203,7 +204,7 @@ Create source and binary distributions with native code and documentation:
|
|||||||
|
|
||||||
Create a local staging version of the website (in /tmp/hadoop-site)
|
Create a local staging version of the website (in /tmp/hadoop-site)
|
||||||
|
|
||||||
$ mvn clean site; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
|
$ mvn clean site -Preleasedocs; mvn site:stage -DstagingDirectory=/tmp/hadoop-site
|
||||||
|
|
||||||
----------------------------------------------------------------------------------
|
----------------------------------------------------------------------------------
|
||||||
Installing Hadoop
|
Installing Hadoop
|
||||||
|
460
dev-support/releasedocmaker.py
Executable file
460
dev-support/releasedocmaker.py
Executable file
@ -0,0 +1,460 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
# or more contributor license agreements. See the NOTICE file
|
||||||
|
# distributed with this work for additional information
|
||||||
|
# regarding copyright ownership. The ASF licenses this file
|
||||||
|
# to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance
|
||||||
|
# with the License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from glob import glob
|
||||||
|
from optparse import OptionParser
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import urllib
|
||||||
|
try:
|
||||||
|
import json
|
||||||
|
except ImportError:
|
||||||
|
import simplejson as json
|
||||||
|
|
||||||
|
releaseVersion={}
|
||||||
|
namePattern = re.compile(r' \([0-9]+\)')
|
||||||
|
|
||||||
|
def clean(str):
    """Strip trailing " (n)" contributor counters from str, then apply
    the markdown/doxia table escaping from tableclean()."""
    return tableclean(namePattern.sub("", str))
|
||||||
|
|
||||||
|
def formatComponents(str):
    """Render a JIRA component list for use inside a markdown table cell.

    Strips " (n)" counters and apostrophes; substitutes "." when the
    result is empty, because some markdown parsers reject empty cells.
    """
    stripped = re.sub(namePattern, '', str).replace("'", "")
    if stripped == "":
        # some markdown parsers don't like empty tables
        stripped = "."
    return clean(stripped)
|
||||||
|
|
||||||
|
# convert to utf-8
|
||||||
|
# protect some known md metachars
|
||||||
|
# or chars that screw up doxia
|
||||||
|
# convert to utf-8 and protect some known markdown metachars,
# or chars that screw up doxia
def tableclean(str):
    """Encode str to UTF-8 and neutralize characters that break
    doxia/markdown rendering (underscores, carriage returns)."""
    cleaned = str.encode('utf-8')
    cleaned = cleaned.replace("_", "\_").replace("\r", "")
    return cleaned.rstrip()
|
||||||
|
|
||||||
|
# same thing as tableclean,
|
||||||
|
# except table metachars are also
|
||||||
|
# escaped as well as more
|
||||||
|
# things we don't want doxia to
|
||||||
|
# screw up
|
||||||
|
# same thing as tableclean, except table metachars are also escaped,
# as well as more things we don't want doxia to screw up
def notableclean(str):
    """Apply tableclean(), then additionally escape the table
    metacharacters | < > for text that goes outside a table."""
    escaped = tableclean(str)
    for meta in ("|", "<", ">"):
        escaped = escaped.replace(meta, "\\" + meta)
    return escaped.rstrip()
|
||||||
|
|
||||||
|
def mstr(obj):
    """Return obj as a unicode string, mapping None to the empty string.

    Uses an identity check rather than ``obj == None``: equality can
    invoke an arbitrary __eq__ on obj, and PEP 8 (E711) mandates ``is``
    for None comparisons.
    """
    if obj is None:
        return ""
    return unicode(obj)
|
||||||
|
|
||||||
|
def buildindex(master):
    """Write index.md linking the per-version changelog and release-note pages.

    Scans the current working directory for version-named subdirectories
    (x.y.z), newest first.  When master is true only the combined pages
    are linked; otherwise per-project (HADOOP/HDFS/MapReduce/YARN) pages
    are linked as well.

    Fix: the previous version also called indexfile.close() inside the
    ``with`` block, which is redundant — the context manager closes the
    file on exit.
    """
    versions = reversed(sorted(glob("[0-9]*.[0-9]*.[0-9]*")))
    with open("index.md", "w") as indexfile:
        for v in versions:
            indexfile.write("* Apache Hadoop v%s\n" % (v))
            for k in ("Changes", "Release Notes"):
                indexfile.write("    * %s\n" % (k))
                indexfile.write("        * [Combined %s](%s/%s.%s.html)\n" \
                                % (k, v, k.upper().replace(" ", ""), v))
                if not master:
                    indexfile.write("        * [Hadoop Common %s](%s/%s.HADOOP.%s.html)\n" \
                                    % (k, v, k.upper().replace(" ", ""), v))
                    for p in ("HDFS", "MapReduce", "YARN"):
                        indexfile.write("        * [%s %s](%s/%s.%s.%s.html)\n" \
                                        % (p, k, v, k.upper().replace(" ", ""), p.upper(), v))
|
||||||
|
|
||||||
|
class Version:
    """Represents a version number such as "2.7.0" or "3.0.0-SNAPSHOT".

    Parses the leading dotted-numeric prefix into self.parts (padded to
    three components with zeroes) so versions compare numerically, while
    self.data keeps the original string for display.
    """

    def __init__(self, data):
        self.mod = False
        self.data = data
        matched = re.match('^((\d+)(\.\d+)*).*$', data)
        if matched:
            self.parts = [int(piece) for piece in matched.group(1).split('.')]
        else:
            self.parts = []
        # backfill version with zeroes if missing parts
        self.parts.extend((0,) * (3 - len(self.parts)))

    def __str__(self):
        # once the parts list has been modified, render from it instead
        # of the original input string
        if self.mod:
            return '.'.join([str(piece) for piece in self.parts])
        return self.data

    def __cmp__(self, other):
        return cmp(self.parts, other.parts)
|
||||||
|
|
||||||
|
class Jira:
    """A single JIRA issue, backed by the raw JSON of the JIRA REST API."""

    def __init__(self, data, parent):
        self.key = data['key']
        self.fields = data['fields']
        self.parent = parent      # owning JiraIter, used for fieldIdMap lookups
        self.notes = None         # lazily-computed release note text
        self.incompat = None      # lazily-computed "incompatible change" flag
        self.reviewed = None      # lazily-computed "reviewed" flag

    def getId(self):
        return mstr(self.key)

    def getDescription(self):
        return mstr(self.fields['description'])

    def getReleaseNote(self):
        """Return the Release Note custom field, falling back to the
        issue description when the field is absent.  Cached."""
        if self.notes is None:
            field = self.parent.fieldIdMap['Release Note']
            if field in self.fields:
                self.notes = mstr(self.fields[field])
            else:
                self.notes = self.getDescription()
        return self.notes

    def getPriority(self):
        ret = ""
        pri = self.fields['priority']
        if pri is not None:
            ret = pri['name']
        return mstr(ret)

    def getAssignee(self):
        ret = ""
        mid = self.fields['assignee']
        if mid is not None:
            ret = mid['displayName']
        return mstr(ret)

    def getComponents(self):
        if len(self.fields['components']) > 0:
            return ", ".join([comp['name'] for comp in self.fields['components']])
        else:
            return ""

    def getSummary(self):
        return self.fields['summary']

    def getType(self):
        ret = ""
        mid = self.fields['issuetype']
        if mid is not None:
            ret = mid['name']
        return mstr(ret)

    def getReporter(self):
        ret = ""
        mid = self.fields['reporter']
        if mid is not None:
            ret = mid['displayName']
        return mstr(ret)

    def getProject(self):
        ret = ""
        mid = self.fields['project']
        if mid is not None:
            ret = mid['key']
        return mstr(ret)

    def __cmp__(self, other):
        """Order by project key, then numerically by issue number.

        Fixes two defects in the previous implementation: it returned
        True/False (i.e. 1/0) for the less-than case, which inverted the
        ordering, and it compared issue numbers as strings, which sorts
        e.g. HADOOP-1000 before HADOOP-999.
        """
        selfsplit = self.getId().split('-')
        othersplit = other.getId().split('-')
        v1 = cmp(selfsplit[0], othersplit[0])
        if v1 != 0:
            return v1
        return cmp(int(selfsplit[1]), int(othersplit[1]))

    def getIncompatibleChange(self):
        """Return True when the Hadoop Flags field marks this issue as an
        incompatible change.  Also caches the "Reviewed" flag.  Cached."""
        if self.incompat is None:
            field = self.parent.fieldIdMap['Hadoop Flags']
            self.reviewed = False
            self.incompat = False
            if field in self.fields:
                if self.fields[field]:
                    for hf in self.fields[field]:
                        if hf['value'] == "Incompatible change":
                            self.incompat = True
                        if hf['value'] == "Reviewed":
                            self.reviewed = True
        return self.incompat

    def getReleaseDate(self, version):
        """Return the release date of the fixVersion matching *version*,
        or None when not found.

        NOTE(review): the previous code compared each fixVersion dict
        directly against the Version object, which can never be equal,
        so this method always returned None; compare by name instead.
        """
        for fixver in self.fields['fixVersions']:
            if fixver['name'] == str(version):
                return fixver['releaseDate']
        return None
|
||||||
|
|
||||||
|
class JiraIter:
    """An Iterator of JIRAs"""

    def __init__(self, versions):
        # versions: list of Version objects to query for; "-SNAPSHOT" is
        # stripped before being sent to JIRA.
        self.versions = versions

        # Fetch the field catalog so custom fields ("Release Note",
        # "Hadoop Flags") can be looked up by display name.
        resp = urllib.urlopen("https://issues.apache.org/jira/rest/api/2/field")
        data = json.loads(resp.read())

        self.fieldIdMap = {}
        for part in data:
            self.fieldIdMap[part['name']] = part['id']

        # Page through the search results, `count` issues at a time,
        # until `startAt` passes the reported total.
        self.jiras = []
        at = 0
        end = 1
        count = 100
        while (at < end):
            params = urllib.urlencode({'jql': "project in (HADOOP,HDFS,MAPREDUCE,YARN) and fixVersion in ('"+"' , '".join([str(v).replace("-SNAPSHOT","") for v in versions])+"') and resolution = Fixed", 'startAt':at, 'maxResults':count})
            resp = urllib.urlopen("https://issues.apache.org/jira/rest/api/2/search?%s"%params)
            data = json.loads(resp.read())
            if (data.has_key('errorMessages')):
                raise Exception(data['errorMessages'])
            at = data['startAt'] + data['maxResults']
            end = data['total']
            self.jiras.extend(data['issues'])

        # Populate the module-level releaseVersion cache (version name ->
        # release date) for any requested version not already cached.
        needaversion = False
        for j in versions:
            v = str(j).replace("-SNAPSHOT", "")
            if v not in releaseVersion:
                needaversion = True

        if needaversion is True:
            # NOTE(review): `data` here is only the *last* page of search
            # results, so the backfill inspects just that page — verify
            # this is intentional when results span multiple pages.
            for i in range(len(data['issues'])):
                for j in range(len(data['issues'][i]['fields']['fixVersions'])):
                    if 'releaseDate' in data['issues'][i]['fields']['fixVersions'][j]:
                        releaseVersion[data['issues'][i]['fields']['fixVersions'][j]['name']]=\
                            data['issues'][i]['fields']['fixVersions'][j]['releaseDate']

        self.iter = self.jiras.__iter__()

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol: wrap each raw issue dict in a Jira
        # object bound to this iterator (for fieldIdMap access).
        data = self.iter.next()
        j = Jira(data, self)
        return j
|
||||||
|
|
||||||
|
class Outputs:
    """Fan-out writer: one combined "base" file plus an optional
    per-project file for each key, all written in lockstep.

    File names are produced by %-formatting the given patterns with
    `params` (plus a 'key' entry for the per-key files).
    """

    def __init__(self, base_file_name, file_name_pattern, keys, params={}):
        self.params = params
        self.base = open(base_file_name % params, 'w')
        self.others = {}
        for key in keys:
            per_key = dict(params)
            per_key['key'] = key
            self.others[key] = open(file_name_pattern % per_key, 'w')

    def writeAll(self, pattern):
        """%-format `pattern` and write it to every open file; the base
        file sees an empty 'key', each per-key file sees its own key."""
        base_vars = dict(self.params)
        base_vars['key'] = ''
        self.base.write(pattern % base_vars)
        for key, fd in self.others.items():
            key_vars = dict(self.params)
            key_vars['key'] = key
            fd.write(pattern % key_vars)

    def writeKeyRaw(self, key, str):
        """Write `str` verbatim to the base file and, when present, to
        the file registered for `key`."""
        self.base.write(str)
        if key in self.others:
            self.others[key].write(str)

    def close(self):
        self.base.close()
        for fd in self.others.values():
            fd.close()

    def writeList(self, mylist):
        """Emit one markdown table row per JIRA, sorted, routed to the
        base file and the issue's own project file."""
        for jira in sorted(mylist):
            cells = (notableclean(jira.getId()), notableclean(jira.getId()),
                     notableclean(jira.getSummary()),
                     notableclean(jira.getPriority()),
                     formatComponents(jira.getComponents()),
                     notableclean(jira.getReporter()),
                     notableclean(jira.getAssignee()))
            row = '| [%s](https://issues.apache.org/jira/browse/%s) | %s | %s | %s | %s | %s |\n' % cells
            self.writeKeyRaw(jira.getProject(), row)
|
||||||
|
|
||||||
|
def main():
    """Command-line entry point.

    Queries JIRA for every fixed issue in the requested version(s) and
    writes markdown CHANGES.*.md and RELEASENOTES.*.md files into a
    directory named after the highest version supplied.  Requires
    Internet connectivity to reach issues.apache.org.
    """
    parser = OptionParser(usage="usage: %prog --version VERSION [--version VERSION2 ...]",
              epilog=
              "Markdown-formatted CHANGES and RELEASENOTES files will be stored in a directory"
              " named after the highest version provided.")
    parser.add_option("-v", "--version", dest="versions",
              action="append", type="string",
              help="versions in JIRA to include in releasenotes", metavar="VERSION")
    parser.add_option("-m","--master", dest="master", action="store_true",
              help="only create the master, merged project files")
    parser.add_option("-i","--index", dest="index", action="store_true",
              help="build an index file")
    (options, args) = parser.parse_args()

    if (options.versions == None):
        options.versions = []

    # legacy positional invocation: third positional arg is a version
    if (len(args) > 2):
        options.versions.append(args[2])

    if (len(options.versions) <= 0):
        parser.error("At least one version needs to be supplied")

    versions = [ Version(v) for v in options.versions ];
    versions.sort();

    # all output goes into a directory named after the highest version
    maxVersion = str(versions[-1])

    # constructing JiraIter also populates the module-level
    # releaseVersion cache used just below
    jlist = JiraIter(versions)
    version = maxVersion

    if version in releaseVersion:
        reldate=releaseVersion[version]
    else:
        reldate="Unreleased"

    if not os.path.exists(version):
        os.mkdir(version)

    # --master suppresses the per-project (HADOOP/HDFS/MAPREDUCE/YARN)
    # output files; only the combined files are written
    if options.master:
        reloutputs = Outputs("%(ver)s/RELEASENOTES.%(ver)s.md",
            "%(ver)s/RELEASENOTES.%(key)s.%(ver)s.md",
            [], {"ver":maxVersion, "date":reldate})
        choutputs = Outputs("%(ver)s/CHANGES.%(ver)s.md",
            "%(ver)s/CHANGES.%(key)s.%(ver)s.md",
            [], {"ver":maxVersion, "date":reldate})
    else:
        reloutputs = Outputs("%(ver)s/RELEASENOTES.%(ver)s.md",
            "%(ver)s/RELEASENOTES.%(key)s.%(ver)s.md",
            ["HADOOP","HDFS","MAPREDUCE","YARN"], {"ver":maxVersion, "date":reldate})
        choutputs = Outputs("%(ver)s/CHANGES.%(ver)s.md",
            "%(ver)s/CHANGES.%(key)s.%(ver)s.md",
            ["HADOOP","HDFS","MAPREDUCE","YARN"], {"ver":maxVersion, "date":reldate})

    relhead = '# Hadoop %(key)s %(ver)s Release Notes\n\n' \
        'These release notes cover new developer and user-facing incompatibilities, features, and major improvements.\n\n'

    chhead = '# Hadoop Changelog\n\n' \
        '## Release %(ver)s - %(date)s\n'\
        '\n'

    reloutputs.writeAll(relhead)
    choutputs.writeAll(chhead)

    # bucket the issues by type for the changelog sections below
    incompatlist=[]
    buglist=[]
    improvementlist=[]
    newfeaturelist=[]
    subtasklist=[]
    tasklist=[]
    testlist=[]
    otherlist=[]

    for jira in sorted(jlist):
        if jira.getIncompatibleChange():
            incompatlist.append(jira)
        elif jira.getType() == "Bug":
            buglist.append(jira)
        elif jira.getType() == "Improvement":
            improvementlist.append(jira)
        elif jira.getType() == "New Feature":
            newfeaturelist.append(jira)
        elif jira.getType() == "Sub-task":
            subtasklist.append(jira)
        elif jira.getType() == "Task":
            tasklist.append(jira)
        elif jira.getType() == "Test":
            testlist.append(jira)
        else:
            otherlist.append(jira)

        line = '* [%s](https://issues.apache.org/jira/browse/%s) | *%s* | **%s**\n' \
            % (notableclean(jira.getId()), notableclean(jira.getId()), notableclean(jira.getPriority()),
               notableclean(jira.getSummary()))

        # incompatible changes with no release note get a loud warning
        # both in the generated notes and on the console
        if (jira.getIncompatibleChange()) and (len(jira.getReleaseNote())==0):
            reloutputs.writeKeyRaw(jira.getProject(),"\n---\n\n")
            reloutputs.writeKeyRaw(jira.getProject(), line)
            line ='\n**WARNING: No release note provided for this incompatible change.**\n\n'
            print 'WARNING: incompatible change %s lacks release notes.' % (notableclean(jira.getId()))
            reloutputs.writeKeyRaw(jira.getProject(), line)

        if (len(jira.getReleaseNote())>0):
            reloutputs.writeKeyRaw(jira.getProject(),"\n---\n\n")
            reloutputs.writeKeyRaw(jira.getProject(), line)
            line ='\n%s\n\n' % (tableclean(jira.getReleaseNote()))
            reloutputs.writeKeyRaw(jira.getProject(), line)

    reloutputs.writeAll("\n\n")
    reloutputs.close()

    # one markdown table per issue-type section in the changelog
    choutputs.writeAll("### INCOMPATIBLE CHANGES:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(incompatlist)

    choutputs.writeAll("\n\n### NEW FEATURES:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(newfeaturelist)

    choutputs.writeAll("\n\n### IMPROVEMENTS:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(improvementlist)

    choutputs.writeAll("\n\n### BUG FIXES:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(buglist)

    choutputs.writeAll("\n\n### TESTS:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(testlist)

    choutputs.writeAll("\n\n### SUB-TASKS:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(subtasklist)

    # tasks are folded into OTHER rather than getting their own section
    choutputs.writeAll("\n\n### OTHER:\n\n")
    choutputs.writeAll("| JIRA | Summary | Priority | Component | Reporter | Contributor |\n")
    choutputs.writeAll("|:---- |:---- | :--- |:---- |:---- |:---- |\n")
    choutputs.writeList(otherlist)
    choutputs.writeList(tasklist)

    choutputs.writeAll("\n\n")
    choutputs.close()

    if options.index:
        buildindex(options.master)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
@ -1,274 +0,0 @@
|
|||||||
#!/usr/bin/python
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
from optparse import OptionParser
|
|
||||||
import httplib
|
|
||||||
import urllib
|
|
||||||
import cgi
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
except ImportError:
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
|
|
||||||
namePattern = re.compile(r' \([0-9]+\)')
|
|
||||||
|
|
||||||
def clean(str):
|
|
||||||
return quoteHtml(re.sub(namePattern, "", str))
|
|
||||||
|
|
||||||
def formatComponents(str):
|
|
||||||
str = re.sub(namePattern, '', str).replace("'", "")
|
|
||||||
if str != "":
|
|
||||||
ret = "(" + str + ")"
|
|
||||||
else:
|
|
||||||
ret = ""
|
|
||||||
return quoteHtml(ret)
|
|
||||||
|
|
||||||
def quoteHtml(str):
|
|
||||||
return cgi.escape(str).encode('ascii', 'xmlcharrefreplace')
|
|
||||||
|
|
||||||
def mstr(obj):
|
|
||||||
if (obj == None):
|
|
||||||
return ""
|
|
||||||
return unicode(obj)
|
|
||||||
|
|
||||||
class Version:
|
|
||||||
"""Represents a version number"""
|
|
||||||
def __init__(self, data):
|
|
||||||
self.mod = False
|
|
||||||
self.data = data
|
|
||||||
found = re.match('^((\d+)(\.\d+)*).*$', data)
|
|
||||||
if (found):
|
|
||||||
self.parts = [ int(p) for p in found.group(1).split('.') ]
|
|
||||||
else:
|
|
||||||
self.parts = []
|
|
||||||
# backfill version with zeroes if missing parts
|
|
||||||
self.parts.extend((0,) * (3 - len(self.parts)))
|
|
||||||
|
|
||||||
def decBugFix(self):
|
|
||||||
self.mod = True
|
|
||||||
self.parts[2] -= 1
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __str__(self):
|
|
||||||
if (self.mod):
|
|
||||||
return '.'.join([ str(p) for p in self.parts ])
|
|
||||||
return self.data
|
|
||||||
|
|
||||||
def __cmp__(self, other):
|
|
||||||
return cmp(self.parts, other.parts)
|
|
||||||
|
|
||||||
class Jira:
|
|
||||||
"""A single JIRA"""
|
|
||||||
|
|
||||||
def __init__(self, data, parent):
|
|
||||||
self.key = data['key']
|
|
||||||
self.fields = data['fields']
|
|
||||||
self.parent = parent
|
|
||||||
self.notes = None
|
|
||||||
|
|
||||||
def getId(self):
|
|
||||||
return mstr(self.key)
|
|
||||||
|
|
||||||
def getDescription(self):
|
|
||||||
return mstr(self.fields['description'])
|
|
||||||
|
|
||||||
def getReleaseNote(self):
|
|
||||||
if (self.notes == None):
|
|
||||||
field = self.parent.fieldIdMap['Release Note']
|
|
||||||
if (self.fields.has_key(field)):
|
|
||||||
self.notes=mstr(self.fields[field])
|
|
||||||
else:
|
|
||||||
self.notes=self.getDescription()
|
|
||||||
return self.notes
|
|
||||||
|
|
||||||
def getPriority(self):
|
|
||||||
ret = ""
|
|
||||||
pri = self.fields['priority']
|
|
||||||
if(pri != None):
|
|
||||||
ret = pri['name']
|
|
||||||
return mstr(ret)
|
|
||||||
|
|
||||||
def getAssignee(self):
|
|
||||||
ret = ""
|
|
||||||
mid = self.fields['assignee']
|
|
||||||
if(mid != None):
|
|
||||||
ret = mid['displayName']
|
|
||||||
return mstr(ret)
|
|
||||||
|
|
||||||
def getComponents(self):
|
|
||||||
return " , ".join([ comp['name'] for comp in self.fields['components'] ])
|
|
||||||
|
|
||||||
def getSummary(self):
|
|
||||||
return self.fields['summary']
|
|
||||||
|
|
||||||
def getType(self):
|
|
||||||
ret = ""
|
|
||||||
mid = self.fields['issuetype']
|
|
||||||
if(mid != None):
|
|
||||||
ret = mid['name']
|
|
||||||
return mstr(ret)
|
|
||||||
|
|
||||||
def getReporter(self):
|
|
||||||
ret = ""
|
|
||||||
mid = self.fields['reporter']
|
|
||||||
if(mid != None):
|
|
||||||
ret = mid['displayName']
|
|
||||||
return mstr(ret)
|
|
||||||
|
|
||||||
def getProject(self):
|
|
||||||
ret = ""
|
|
||||||
mid = self.fields['project']
|
|
||||||
if(mid != None):
|
|
||||||
ret = mid['key']
|
|
||||||
return mstr(ret)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class JiraIter:
|
|
||||||
"""An Iterator of JIRAs"""
|
|
||||||
|
|
||||||
def __init__(self, versions):
|
|
||||||
self.versions = versions
|
|
||||||
|
|
||||||
resp = urllib.urlopen("https://issues.apache.org/jira/rest/api/2/field")
|
|
||||||
data = json.loads(resp.read())
|
|
||||||
|
|
||||||
self.fieldIdMap = {}
|
|
||||||
for part in data:
|
|
||||||
self.fieldIdMap[part['name']] = part['id']
|
|
||||||
|
|
||||||
self.jiras = []
|
|
||||||
at=0
|
|
||||||
end=1
|
|
||||||
count=100
|
|
||||||
while (at < end):
|
|
||||||
params = urllib.urlencode({'jql': "project in (HADOOP,HDFS,MAPREDUCE,YARN) and fixVersion in ('"+"' , '".join(versions)+"') and resolution = Fixed", 'startAt':at, 'maxResults':count})
|
|
||||||
resp = urllib.urlopen("https://issues.apache.org/jira/rest/api/2/search?%s"%params)
|
|
||||||
data = json.loads(resp.read())
|
|
||||||
if (data.has_key('errorMessages')):
|
|
||||||
raise Exception(data['errorMessages'])
|
|
||||||
at = data['startAt'] + data['maxResults']
|
|
||||||
end = data['total']
|
|
||||||
self.jiras.extend(data['issues'])
|
|
||||||
|
|
||||||
self.iter = self.jiras.__iter__()
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def next(self):
|
|
||||||
data = self.iter.next()
|
|
||||||
j = Jira(data, self)
|
|
||||||
return j
|
|
||||||
|
|
||||||
class Outputs:
|
|
||||||
"""Several different files to output to at the same time"""
|
|
||||||
|
|
||||||
def __init__(self, base_file_name, file_name_pattern, keys, params={}):
|
|
||||||
self.params = params
|
|
||||||
self.base = open(base_file_name%params, 'w')
|
|
||||||
self.others = {}
|
|
||||||
for key in keys:
|
|
||||||
both = dict(params)
|
|
||||||
both['key'] = key
|
|
||||||
self.others[key] = open(file_name_pattern%both, 'w')
|
|
||||||
|
|
||||||
def writeAll(self, pattern):
|
|
||||||
both = dict(self.params)
|
|
||||||
both['key'] = ''
|
|
||||||
self.base.write(pattern%both)
|
|
||||||
for key in self.others.keys():
|
|
||||||
both = dict(self.params)
|
|
||||||
both['key'] = key
|
|
||||||
self.others[key].write(pattern%both)
|
|
||||||
|
|
||||||
def writeKeyRaw(self, key, str):
|
|
||||||
self.base.write(str)
|
|
||||||
if (self.others.has_key(key)):
|
|
||||||
self.others[key].write(str)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
self.base.close()
|
|
||||||
for fd in self.others.values():
|
|
||||||
fd.close()
|
|
||||||
|
|
||||||
def main():
|
|
||||||
parser = OptionParser(usage="usage: %prog [options] [USER-ignored] [PASSWORD-ignored] [VERSION]")
|
|
||||||
parser.add_option("-v", "--version", dest="versions",
|
|
||||||
action="append", type="string",
|
|
||||||
help="versions in JIRA to include in releasenotes", metavar="VERSION")
|
|
||||||
parser.add_option("--previousVer", dest="previousVer",
|
|
||||||
action="store", type="string",
|
|
||||||
help="previous version to include in releasenotes", metavar="VERSION")
|
|
||||||
|
|
||||||
(options, args) = parser.parse_args()
|
|
||||||
|
|
||||||
if (options.versions == None):
|
|
||||||
options.versions = []
|
|
||||||
|
|
||||||
if (len(args) > 2):
|
|
||||||
options.versions.append(args[2])
|
|
||||||
|
|
||||||
if (len(options.versions) <= 0):
|
|
||||||
parser.error("At least one version needs to be supplied")
|
|
||||||
|
|
||||||
versions = [ Version(v) for v in options.versions];
|
|
||||||
versions.sort();
|
|
||||||
|
|
||||||
maxVersion = str(versions[-1])
|
|
||||||
if(options.previousVer == None):
|
|
||||||
options.previousVer = str(versions[0].decBugFix())
|
|
||||||
print >> sys.stderr, "WARNING: no previousVersion given, guessing it is "+options.previousVer
|
|
||||||
|
|
||||||
list = JiraIter(options.versions)
|
|
||||||
version = maxVersion
|
|
||||||
outputs = Outputs("releasenotes.%(ver)s.html",
|
|
||||||
"releasenotes.%(key)s.%(ver)s.html",
|
|
||||||
["HADOOP","HDFS","MAPREDUCE","YARN"], {"ver":maxVersion, "previousVer":options.previousVer})
|
|
||||||
|
|
||||||
head = '<META http-equiv="Content-Type" content="text/html; charset=UTF-8">\n' \
|
|
||||||
'<title>Hadoop %(key)s %(ver)s Release Notes</title>\n' \
|
|
||||||
'<STYLE type="text/css">\n' \
|
|
||||||
' H1 {font-family: sans-serif}\n' \
|
|
||||||
' H2 {font-family: sans-serif; margin-left: 7mm}\n' \
|
|
||||||
' TABLE {margin-left: 7mm}\n' \
|
|
||||||
'</STYLE>\n' \
|
|
||||||
'</head>\n' \
|
|
||||||
'<body>\n' \
|
|
||||||
'<h1>Hadoop %(key)s %(ver)s Release Notes</h1>\n' \
|
|
||||||
'These release notes include new developer and user-facing incompatibilities, features, and major improvements. \n' \
|
|
||||||
'<a name="changes"/>\n' \
|
|
||||||
'<h2>Changes since Hadoop %(previousVer)s</h2>\n' \
|
|
||||||
'<ul>\n'
|
|
||||||
|
|
||||||
outputs.writeAll(head)
|
|
||||||
|
|
||||||
for jira in list:
|
|
||||||
line = '<li> <a href="https://issues.apache.org/jira/browse/%s">%s</a>.\n' \
|
|
||||||
' %s %s reported by %s and fixed by %s %s<br>\n' \
|
|
||||||
' <b>%s</b><br>\n' \
|
|
||||||
' <blockquote>%s</blockquote></li>\n' \
|
|
||||||
% (quoteHtml(jira.getId()), quoteHtml(jira.getId()), clean(jira.getPriority()), clean(jira.getType()).lower(),
|
|
||||||
quoteHtml(jira.getReporter()), quoteHtml(jira.getAssignee()), formatComponents(jira.getComponents()),
|
|
||||||
quoteHtml(jira.getSummary()), quoteHtml(jira.getReleaseNote()))
|
|
||||||
outputs.writeKeyRaw(jira.getProject(), line)
|
|
||||||
|
|
||||||
outputs.writeAll("</ul>\n</body></html>\n")
|
|
||||||
outputs.close()
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
|
|
@ -21,6 +21,8 @@ Trunk (Unreleased)
|
|||||||
|
|
||||||
HADOOP-11553. Formalize the shell API (aw)
|
HADOOP-11553. Formalize the shell API (aw)
|
||||||
|
|
||||||
|
HADOOP-11731. Rework the changelog and releasenotes (aw)
|
||||||
|
|
||||||
NEW FEATURES
|
NEW FEATURES
|
||||||
|
|
||||||
HADOOP-6590. Add a username check for hadoop sub-commands (John Smith via aw)
|
HADOOP-6590. Add a username check for hadoop sub-commands (John Smith via aw)
|
||||||
|
@ -479,6 +479,7 @@
|
|||||||
</execution>
|
</execution>
|
||||||
</executions>
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.rat</groupId>
|
<groupId>org.apache.rat</groupId>
|
||||||
<artifactId>apache-rat-plugin</artifactId>
|
<artifactId>apache-rat-plugin</artifactId>
|
||||||
@ -901,6 +902,56 @@
|
|||||||
</plugins>
|
</plugins>
|
||||||
</build>
|
</build>
|
||||||
</profile>
|
</profile>
|
||||||
|
|
||||||
|
<profile>
|
||||||
|
<id>releasedocs</id>
|
||||||
|
<activation>
|
||||||
|
<activeByDefault>false</activeByDefault>
|
||||||
|
</activation>
|
||||||
|
<build>
|
||||||
|
<plugins>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.codehaus.mojo</groupId>
|
||||||
|
<artifactId>exec-maven-plugin</artifactId>
|
||||||
|
<executions>
|
||||||
|
<execution>
|
||||||
|
<id>releasedocs</id>
|
||||||
|
<phase>pre-site</phase>
|
||||||
|
<goals>
|
||||||
|
<goal>exec</goal>
|
||||||
|
</goals>
|
||||||
|
<configuration>
|
||||||
|
<executable>python</executable>
|
||||||
|
<workingDirectory>src/site/markdown/release/</workingDirectory>
|
||||||
|
<requiresOnline>true</requiresOnline>
|
||||||
|
<arguments>
|
||||||
|
<argument>${basedir}/../../dev-support/releasedocmaker.py</argument>
|
||||||
|
<argument>--version</argument>
|
||||||
|
<argument>${project.version}</argument>
|
||||||
|
<argument>--index</argument>
|
||||||
|
</arguments>
|
||||||
|
</configuration>
|
||||||
|
</execution>
|
||||||
|
</executions>
|
||||||
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<artifactId>maven-clean-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<filesets>
|
||||||
|
<fileset>
|
||||||
|
<directory>src/site/markdown/release</directory>
|
||||||
|
<includes>
|
||||||
|
<include>${project.version}</include>
|
||||||
|
</includes>
|
||||||
|
<followSymlinks>false</followSymlinks>
|
||||||
|
</fileset>
|
||||||
|
</filesets>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
</plugins>
|
||||||
|
</build>
|
||||||
|
</profile>
|
||||||
|
|
||||||
</profiles>
|
</profiles>
|
||||||
</project>
|
</project>
|
||||||
|
|
||||||
|
@ -159,13 +159,9 @@
|
|||||||
</menu>
|
</menu>
|
||||||
|
|
||||||
<menu name="Reference" inherit="top">
|
<menu name="Reference" inherit="top">
|
||||||
<item name="Release Notes" href="hadoop-project-dist/hadoop-common/releasenotes.html"/>
|
<item name="Changelog and Release Notes" href="hadoop-project-dist/hadoop-common/release/index.html"/>
|
||||||
<item name="Java API docs" href="api/index.html"/>
|
<item name="Java API docs" href="api/index.html"/>
|
||||||
<item name="Unix Shell API" href="hadoop-project-dist/hadoop-common/UnixShellAPI.html"/>
|
<item name="Unix Shell API" href="hadoop-project-dist/hadoop-common/UnixShellAPI.html"/>
|
||||||
<item name="Common CHANGES.txt" href="hadoop-project-dist/hadoop-common/CHANGES.txt"/>
|
|
||||||
<item name="HDFS CHANGES.txt" href="hadoop-project-dist/hadoop-hdfs/CHANGES.txt"/>
|
|
||||||
<item name="MapReduce CHANGES.txt" href="hadoop-project-dist/hadoop-mapreduce/CHANGES.txt"/>
|
|
||||||
<item name="YARN CHANGES.txt" href="hadoop-project-dist/hadoop-yarn/CHANGES.txt"/>
|
|
||||||
<item name="Metrics" href="hadoop-project-dist/hadoop-common/Metrics.html"/>
|
<item name="Metrics" href="hadoop-project-dist/hadoop-common/Metrics.html"/>
|
||||||
</menu>
|
</menu>
|
||||||
|
|
||||||
|
Loading…
Reference in New Issue
Block a user