<?xml version="1.0" encoding="utf-8" ?>
<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:syn="http://purl.org/rss/1.0/modules/syndication/" xmlns="http://purl.org/rss/1.0/">




    



<channel rdf:about="https://cis-india.org/search_rss">
  <title>Centre for Internet and Society</title>
  <link>https://cis-india.org</link>
  
  <description>These are the search results for the query, showing results 61 to 75.</description>
  
  
  
  
  <image rdf:resource="https://cis-india.org/logo.png"/>

  <items>
    <rdf:Seq>
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/roundtable-on-intermediary-liability-and-gender-based-violence-at-the-digital-citizen-summit-2018"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/zero-draft-of-content-removal-best-practices-white-paper"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/webinar-on-the-draft-intermediary-guidelines-amendment-rules"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/roundtable-discussion-on-intermediary-liability"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/a2k/news/medianama-roundtables-on-intermediary-liability-rules"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/response-to-the-draft-of-the-information-technology-intermediary-guidelines-amendment-rules-2018"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/news/2019-international-asia-conference"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/a-look-at-two-problematic-provisions-of-the-draft-anti-trafficking-bill"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/news/india-the-new-front-line-in-the-global-struggle-for-internet-freedom"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/news/beauty-blog-creates-furore"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/it-amendment-act-69-rules-draft-and-final-version-comparison"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/gni-and-iamai-launch-interactive-slideshow-exploring-impact-of-indias-internet-laws"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/news/articles-latimes-com-mark-magnier-aug-23-2012-india-limits-social-media-after-civil-unrest"/>
        
        
            <rdf:li rdf:resource="https://cis-india.org/internet-governance/blog/rebuttal-dit-press-release-intermediaries"/>
        
    </rdf:Seq>
  </items>

</channel>


    <item rdf:about="https://cis-india.org/internet-governance/news/roundtable-on-intermediary-liability-and-gender-based-violence-at-the-digital-citizen-summit-2018">
    <title>Roundtable on Intermediary Liability and Gender Based Violence at the Digital Citizen Summit, 2018</title>
    <link>https://cis-india.org/internet-governance/news/roundtable-on-intermediary-liability-and-gender-based-violence-at-the-digital-citizen-summit-2018</link>
    <description>
        &lt;b&gt;Akriti Bopanna and Ambika Tandon conducted a panel on 'Gender and Intermediary Liability' at the Digital Citizen Summit, hosted by the Digital Empowerment Foundation, on November 1, 2018 at India International Centre, New Delhi.&lt;/b&gt;
        &lt;p class="moz-quote-pre"&gt;Ambika was the moderator for the panel, with Apar Gupta, Jyoti Pandey, Amrita Vasudevan, Anja Kovacs, and Japleen Pasricha as speakers. Click to read the &lt;a class="external-link" href="http://cis-india.org/internet-governance/files/concept-note-digital-citizen-summit"&gt;concept note&lt;/a&gt; and the &lt;a class="external-link" href="http://cis-india.org/internet-governance/files/dcs-2018-agenda"&gt;agenda&lt;/a&gt;.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/roundtable-on-intermediary-liability-and-gender-based-violence-at-the-digital-citizen-summit-2018'&gt;https://cis-india.org/internet-governance/news/roundtable-on-intermediary-liability-and-gender-based-violence-at-the-digital-citizen-summit-2018&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2018-11-07T02:55:40Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/zero-draft-of-content-removal-best-practices-white-paper">
    <title>Zero Draft of Content Removal Best Practices White Paper</title>
    <link>https://cis-india.org/internet-governance/blog/zero-draft-of-content-removal-best-practices-white-paper</link>
    <description>
        &lt;b&gt;EFF and CIS Intermediary Liability Project is aimed towards the creation of a set of principles for intermediary liability in consultation with groups of Internet-focused NGOs and the academic community.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The draft paper has been created to frame the discussion and will be made available for public comments and feedback. The draft document and the views represented here are not representative of the positions of the organisations involved in the drafting.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a class="external-link" href="http://tinyurl.com/k2u83ya"&gt;http://tinyurl.com/k2u83ya&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;3 September  2014&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;Introduction&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;The purpose of this white paper is to frame the discussion at several meetings between groups of Internet-focused NGOs that will lead to the creation of a set of principles for intermediary liability.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The principles that develop from this white paper are intended as a civil society contribution to help guide companies, regulators and courts, as they continue to build out the legal landscape in which online intermediaries operate. One aim of these principles is to move towards greater consistency with regards to the laws that apply to intermediaries and their application in practice.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;There are three general approaches to intermediary liability that have been discussed in much of the recent work in this area, including CDT’s 2012 report called “Shielding the Messengers: Protecting Platforms for Expression and Innovation.” The CDT’s 2012 report divides approaches to intermediary liability into three models: 1. Expansive Protections Against Liability for Intermediaries, 2. Conditional Safe Harbor from Liability, 3. Blanket or Strict Liability for Intermediaries.&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt1"&gt;&lt;sup&gt;[1]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;This white paper argues in the alternative that (a) the “expansive protections against liability” model is preferable, but likely not possible given the current state of play in the legal and policy space (b) therefore the white paper supports “conditional safe harbor from liability” operating via a ‘notice-to-notice’ regime if possible, and a ‘notice and action’ regime if ‘notice-to-notice’ is deemed impossible, and finally (c) all of the other principles discussed in this white paper should apply to whatever model for intermediary liability is adopted unless those principles are facially incompatible with the model that is finally adopted.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;As further general background, this white paper works from the position that there are three general types of online intermediaries- Internet Service Providers (ISPs), search engines, and social networks. As outlined in the recent draft UNESCO Report (from which this white paper draws extensively);&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“With many kinds of companies operating many kinds of products and services, it is important to clarify what constitutes an intermediary. In a 2010 report, the Organization for Economic Co-operation and Development (OECD) explains that Internet intermediaries “bring together or facilitate transactions between third parties on the Internet. They give access to, host, transmit and index content, products and services originated by third parties on the Internet or provide Internet-based services to third parties.”&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Most definitions of intermediaries explicitly exclude content producers. The freedom of expression advocacy group Article 19 distinguishes intermediaries from “those individuals or organizations who are responsible for producing information in the first place and posting it online.”  Similarly, the Center for Democracy and Technology explains that “these entities facilitate access to content created by others.”  The OECD emphasizes “their role as ‘pure’ intermediaries between third parties,” excluding “activities where service providers give access to, host, transmit or index content or services that they themselves originate.”  These views are endorsed in some laws and court rulings.  In other words, publishers and other media that create and disseminate original content are not intermediaries. Examples of such media entities include a news website that publishes articles written and edited by its staff, or a digital video subscription service that hires people to produce videos and disseminates them to subscribers.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;For the purpose of this case study we will maintain that intermediaries offer services that host, index, or facilitate the transmission and sharing of content created by others. For example, Internet Service Providers (ISPs) connect a user’s device, whether it is a laptop, a mobile phone or something else, to the network of networks known as the Internet. Once a user is connected to the Internet, search engines make a portion of the World Wide Web accessible by allowing individuals to search their database. Search engines are often an essential go-between between websites and Internet users. Social networks connect individual Internet users by allowing them to exchange messages, photos, videos, as well as by allowing them to post content to their network of contacts, or the public at large. Web hosting providers, in turn, make it possible for websites to be published and to be accessed online.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt2"&gt;&lt;sup&gt;[2]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;h2 style="text-align: justify; "&gt;General Principles for ISP Governance - Content Removals&lt;/h2&gt;
&lt;p style="text-align: justify; "&gt;The discussion that follows below outlines nine principles to guide companies, government, and civil society in the development of best practices related to the regulation of online content through intermediaries, as norms, policies, and laws develop in the coming years. The nine principles are: Transparency, Consistency, Clarity, Mindful Community Policy Making, Necessity and Proportionality in Content Restrictions, Privacy, Access to Remedy, Accountability, and Due Process in both Legal and Private Enforcement. Each principle contains subsections that expand upon the theme of the principle to cover more specific issues related to the rights and responsibilities of online intermediaries, government, civil society, and users.&lt;/p&gt;
&lt;h3 style="text-align: justify; "&gt;Principle I: Transparency&lt;/h3&gt;
&lt;p style="text-align: justify; "&gt;“Transparency enables users’ right to privacy and right to freedom of expression. Transparency of laws, policies, practices, decisions, rationale, and outcomes related to privacy and restrictions allow users to make informed choices with respect to their actions and speech online. As such - both governments and companies have a responsibility in ensuring that the public is informed through transparency initiatives.” &lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt3"&gt;&lt;sup&gt;[3]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Government Transparency&lt;/b&gt;&lt;/p&gt;
&lt;ul style="text-align: justify; "&gt;
&lt;li&gt;In general, governments should publish transparency      reports:&lt;/li&gt;
&lt;/ul&gt;
&lt;p style="text-align: justify; "&gt;As part of the democratic process, the citizens of each country have a right to know how their government is applying its laws, and a right to provide feedback about the government’s legal interpretations of its laws. Thus, all governments should be required to publish online transparency reports that provide information about all requests issued by any branch or agency of government for the removal or restriction of online content. Further, governments should allow for the submission of comments and suggestions by a webform hosted on the same webpage where that government’s transparency report is hosted. There should also be some legal mechanism that requires the government to look at the feedback provided by its citizens, ensure that relevant feedback is passed along to legislative bodies, and provide for action to be taken on the citizen-provided feedback where appropriate. Finally, and where possible, the raw data that constitutes each government’s transparency report should be made available online, for free, in a common file format such as .csv, so that civil society may have easy access to it for research purposes.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Governments should be more transparent about content      orders that they impose on ISPs&lt;br /&gt;The legislative process proceeds most effectively when the government knows how the laws that it creates are applied in practice and is able to receive feedback from the public about how those laws should change further, or remain the same. Relatedly, regulation of the Internet is most effective when the legislative and judicial branches are aware of what the other is doing. For all of these reasons, governments should publish information about all of the court orders and executive requests for content removals that they send to online intermediaries. Publishing all of this information in one place necessarily requires that some single entity within the government collects the information, which will have the benefits of giving the government a holistic view of how it is regulating the internet, encouraging dialogue between different branches of government about how best to create and enforce internet content regulation, and encouraging dialogue between the government and its citizens about the laws that govern internet content and their application. &lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Governments should make the compliance requirements      they impose on ISPs public&lt;br /&gt;Each government should maintain a public website that publishes as complete a picture as possible of the content removal requests made by any branch of that government, including the judicial branch. The availability of a public website of this type will further many of the goals and objectives discussed elsewhere in this section. The website should be biased towards high levels of detail about each request and towards disclosure that requests were made, subject only to limited exceptions for compelling public policy reasons, where the disclosure bias conflicts directly with another law, or where disclosure would reveal a user’s PII. The information should be published periodically, ideally more than once a year. The general principle should be: the more information made available, the better. On the same website where a government publishes its ‘Transparency Report,’ that government should attempt to provide a plain-language description of its various laws related to online content, to provide users notice about what content is lawful vs. unlawful, as well as to show how the laws that it enacts in the Internet space fit together. Further, and as discussed in section “b,” infra, government should provide citizens with an online feedback mechanism so that they may participate in the legislative process as it applies to online content.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Governments should give their citizens a way to provide      input on these policies&lt;br /&gt;Private citizens should have the right to provide feedback on the balancing between their civil liberties and other public policies such as security that their government engages in on their behalf. If and when these policies and the compliance requirements they impose on online intermediaries are made publicly available online, there should also be a feedback mechanism built into the site where this information is published. This public feedback mechanism could take a number of different forms, like, for example, a webform that allowed users to indicate their level of satisfaction with prevailing policy choices by choosing amongst several radio buttons, while also providing open text fields to allow the user to submit clarifying comments and specific suggestions. In order to be effective, this online feedback mechanism would have to be accompanied by some sort of legal and budgetary apparatus that would ensure that the feedback was monitored and given some minimum level of deference in the discussions and meetings that led to new policies being created.&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;Government should meet users concerned about its content policies in the online domain. Internet users, as citizens of both the internet and their country of origin, have a natural interest in defining and defending their civil liberties online; government should meet them there to extend the democratic process to the Internet. Denying Internet users a voice in the policymaking processes that determine their rights undermines government credibility and negatively influences users’ ability to freely share information online. As such, content policies should be posted in general terms online and users should have the ability to provide input on those policies online.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;ISP Transparency&lt;/b&gt;&lt;br /&gt;“The transparency practices of a company impact users’ freedom expression by providing insight into the scope of restriction that is taking in place in specific jurisdiction. Key areas of transparency for companies include: specific restrictions, aggregate numbers related to restrictions, company imposed regulations on content, and transparency of applicable law and regulation that the service provider must abide by.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt4"&gt;&lt;sup&gt;[4]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“Disclosure by service providers of notices received and actions taken can provide an important check against abuse. In addition to providing valuable data for assessing the value and effectiveness of a N&amp;amp;A system, creating the expectation that notices will be disclosed may help deter fraudulent or otherwise unjustified notices. In contrast, without transparency, Internet users may remain unaware that content they have posted or searched for has been removed pursuant due to a notice of alleged illegality. Requiring notices to be submitted to a central publication site would provide the most benefit, enabling patterns of poor quality or abusive notices to be readily exposed.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt5"&gt;&lt;sup&gt;[5]&lt;/sup&gt;&lt;/a&gt; Therefore, ISPs at all levels should publish transparency reports that include:&lt;/p&gt;
&lt;ul style="text-align: justify; "&gt;
&lt;li&gt;Government Requests&lt;/li&gt;
&lt;/ul&gt;
&lt;p style="text-align: justify; "&gt;All requests from government agencies and courts should be published in a periodic transparency report, accessible on the intermediary’s website, that publishes information about the requests the intermediary received and what the intermediary did with them in the highest level of detail that is legally possible. The more information that is provided about each request, the better the understanding that the public will have about how laws that affect their rights online are being applied. That said, steps should be taken to prevent the disclosure of personal information in relation to the publication of transparency reports. Beyond redaction of personal information, however, the maximum amount of information about each request should be published, subject as well to the (ideally minimal) restrictions imposed by applicable law. A thorough Transparency Report published by an ISP or online intermediary should include information about the following categories of requests:&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Police and/or Executive Requests&lt;br /&gt;This category includes all requests to the intermediary from an agency that is wholly a part of the national government; from police departments, to intelligence agencies, to school boards from small towns. Surfacing information about all requests from any part of the government helps to avoid corruption and/or inappropriate exercises of governmental power by reminding all government officials, regardless of their rank or seniority, that information about the requests they submit to online intermediaries is subject to public scrutiny. &lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Court Orders&lt;br /&gt;This category includes all orders issued by courts and signed by a judicial officer. It can include ex-parte orders, default judgments, court orders directed at an online intermediary, or court orders directed at a third party presented to the intermediary as evidence in support of a removal request. To the extent legally possible, detailed information should be published about these court orders detailing the type of court order each request was, its constituent elements, and the actions(s) that the intermediary took in response to it. All personally identifying information should be redacted from any court orders that are published by the intermediary as part of a transparency report before publication.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;First Party&lt;br /&gt;Information about court orders should be further broken down into two groups; first party and third party. First party court orders are orders directed at the online intermediary in an adversarial proceeding to which the online intermediary was a party.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Third Party&lt;br /&gt;As mentioned above, ‘third party’ refers to court orders that are not directed at the online intermediary, but rather a third party such as an individual user who posted an allegedly defamatory remark on the intermediary’s platform. If the user who obtains a court order approaches an online intermediary seeking removal of content with a court order directed at the poster of, say, the defamatory content, and the intermediary decides to remove the content in response to the request, the online intermediary that decided to perform the takedown should publish a record of that removal. To be accepted by an intermediary, third party court orders should be issued by a court of appropriate jurisdiction after an adversarial legal proceeding, contain a certified and specific statement that certain content is unlawful, and specifically identify the content that the court has found to be unlawful, by specific, permalinked URL where possible.&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;This type of court order should be broken out separately from court orders directed at the applicable online intermediary in companies’ transparency reports because merely providing aggregate numbers that do not distinguish between the two types gives an inaccurate impression to users that a government is attempting to censor more content than it actually is. The idea of including first party court orders to remove content as a subcategory of ‘government requests’ is that a government’s judiciary speaks on behalf of the government, making determinations about what is permitted under the laws of that country. This analogy does not hold for court orders directed at third parties- when the court made its determination of legality on the content in question, it did not contemplate that the intermediary would remove the content. As such, the court likely did not weigh the relevant public interest and policy factors that would include the importance of freedom of expression or the precedential value of its decision. Therefore, the determination does not fairly reflect an attempt by the government to censor content and should not be considered as such.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Instead, and especially considering that these third party court orders may be the basis for a number of content removals, third party court orders should be counted separately and presented with some published explanation in the company’s transparency report as to what they are and why the company has decided it should remove content pursuant to its receipt of one.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Private-Party Requests&lt;/b&gt;&lt;br /&gt;Private-party requests are requests to remove content that are not issued by a government agency or accompanied by a court order. Some examples of private party requests include copyright complaints submitted pursuant to the Digital Millennium Copyright Act or complaints based on the laws of specific countries, such as laws banning holocaust denial in Germany.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;Policy/TOS Enforcement&lt;/b&gt;&lt;br /&gt;To give users a complete picture of the content that is being removed from the platforms that they use, corporate transparency reports should also provide information about the content that the intermediary removes pursuant to its own policies or terms of service, though there may not be a legal requirement to do so.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;b&gt;User Data Requests&lt;/b&gt;&lt;br /&gt;While this white paper is squarely focused on liability for content posted online and best practices for deciding when and how content should be removed from online services, corporate transparency reports should also provide information about requests for user data from executive agencies, courts, and others.&lt;/p&gt;
&lt;h3 style="text-align: justify; "&gt;Principle II: Consistency&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;Legal requirements for ISPs should be consistent, based      on a global legal framework that establishes baseline limitations on legal      immunity&lt;br /&gt;Broad variation amongst the legal regimes of the countries in which online intermediaries operate increases compliance costs for companies and may discourage them from offering their services in some countries due to the high costs of localized compliance. Reducing the number of speech platforms that citizens have access to limits their ability to express themselves. Therefore, to ensure that citizens of a particular country have access to a robust range of speech platforms, each country should work to harmonize the requirements that it imposes upon online intermediaries with the requirements of other countries. While a certain degree of variation between what is permitted in one country as compared to another is inevitable, all countries should agree on certain limitations to intermediary liability, such as the following: &lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Conduits should be immune from claims about content      that they neither created nor modified&lt;br /&gt;As noted in the 2011 Joint Declaration on Freedom of Expression and the Internet, “[n]o one who simply provides technical Internet services such as providing access, or searching for, or transmission or caching of information, should be liable for content generated by others, which is disseminated using those services, as long as they do not specifically intervene in that content or refuse to obey a court order to remove that content, where they have the capacity to do so (‘mere conduit principle’).”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt6"&gt;&lt;sup&gt;[6]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Court orders should be required for the removal of      content that is related to speech, such as defamation removal requests&lt;br /&gt;In the Center for Democracy and Technology’s Additional Responses Regarding Notice and Action, CDT outlines the case against allowing notice and action procedures to apply to defamation removal requests. They write: &lt;/li&gt;
&lt;p style="text-align: justify; "&gt;“Uniform notice-and-action procedures should not apply horizontally to all types of illegal content. In particular, CDT believes notice-and-takedown is inappropriate for defamation and other areas of law requiring complex legal and factual questions that make private notices especially subject to abuse. Blocking or removing content on the basis of mere allegations of illegality raises serious concerns for free expression and access to information. Hosts are likely to err on the side of caution and comply with most if not all notices they receive, because evaluating notices is burdensome and declining to comply may jeopardize their protection from liability. The risk of legal content being taken down is especially high in cases where assessing the illegality of the content would require detailed factual analysis and careful legal judgments that balance competing fundamental rights and interests. Intermediaries will be extremely reluctant to exercise their own judgment when the legal issues are unclear, and it will be easy for any party submitting a notice to claim a good faith belief that the content in question is unlawful. In short, the murkier the legal analysis, the greater the potential for abuse.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;To reduce this risk, removal of or disablement of access to content based on unadjudicated allegations of illegality (i.e., notices from private parties) should be limited to cases where the content at issue is manifestly illegal – and then only with necessary safeguards against abuse as described above.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;CDT believes that online free expression is best served by narrowing what is considered manifestly illegal and subject to takedown upon private notice. With proper safeguards against abuse, for example, notice-and-action can be an appropriate policy for addressing online copyright infringement. Copyright is an area of law where there is reasonable international consensus regarding what is illegal and where much infringement is straightforward. There can be difficult questions at the margins – for example concerning the applicability of limitations and exceptions such as “fair use” – but much online infringement is not disputable.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Quite different considerations apply to the extension of notice-and-action procedures to allegations of defamation or other illegal content. Other areas of law, including defamation, routinely require far more difficult factual and legal determinations. There is greater potential for abuse of notice-and-action where illegality is less manifest and more disputable. If private notices are sufficient to have allegedly defamatory content removed, for example, any person unhappy about something that has been written about him or her would have the ability and incentive to make an allegation of defamation, creating a significant potential for unjustified notices that harm free expression. This and other areas where illegality is more disputable require different approaches to notice and action. In the case of defamation, CDT believes “notice” for purposes of removing or disabling access to content should come only from a competent court after full adjudication.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In cases where it would be inappropriate to remove or disable access to content based on untested allegations of illegality, service providers receiving allegations of illegal content may be able to take alternative actions in response to notices. Forwarding notices to the content provider or preserving data necessary to facilitate the initiation of legal proceedings, for example, can pose less risk to content providers’ free expression rights, provided there is sufficient process to allow the content provider to challenge the allegations and assert his or her rights, including the right to speak anonymously.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt7"&gt;&lt;sup&gt;[7]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;h3 style="text-align: justify; "&gt;Principle III: Clarity&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;All notices that request the removal of content should      be clear and meet certain minimum requirements&lt;br /&gt;The Center for Democracy and Technology outlined requirements for clear notices in a notice and action system in response to a European Commission public comment period on a revised notice and action regime.&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt8"&gt;&lt;sup&gt;[8]&lt;/sup&gt;&lt;/a&gt; They write:&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;“Notices should include the following features:&lt;/p&gt;
&lt;ol style="text-align: justify; "&gt;
&lt;li&gt;Specificity. Notices should be required to specify the      exact location of the material – such as a specific URL – in order to be      valid. This is perhaps the most important requirement, in that it allows      hosts to take targeted action against identified illegal material without      having to engage in burdensome search or monitoring. Notices that demand      the removal of particular content wherever it appears on a site without      specifying any location(s) are not sufficiently precise to enable targeted      action. &lt;/li&gt;
&lt;li&gt;Description of alleged illegal content. Notices should      be required to include a detailed description of the specific content      alleged to be illegal and to make specific reference to the law allegedly      being violated. In the case of copyright, the notice should identify the      specific work or works claimed to be infringed. &lt;/li&gt;
&lt;li&gt;Contact details. Notices should be required to contain      contact information for the sender. This facilitates assessment of      notices’ validity, feedback to senders regarding invalid notices,      sanctions for abusive notices, and communication or legal action between      the sending party and the poster of the material in question. &lt;/li&gt;
&lt;li&gt;Standing: Notices should be issued only by or on behalf      of the party harmed by the content. For copyright, this would be the      rightsholder or an agent acting on the rightsholderʼs behalf. For child      sexual abuse images, a suitable issuer of notice would be a law      enforcement agency or a child abuse hotline with expertise in assessing      such content. For terrorism content, only government agencies would have      standing to submit notice. &lt;/li&gt;
&lt;li&gt;Certification: A sender of a notice should be required      to attest under legal penalty to a good-faith belief that the content      being complained of is in fact illegal; that the information contained in      the notice is accurate; and, if applicable, that the sender either is the      harmed party or is authorized to act on behalf of the harmed party. This      kind of formal certification requirement signals to notice-senders that      they should view misrepresentation or inaccuracies on notices as akin to      making false or inaccurate statements to a court or administrative body. &lt;/li&gt;
&lt;li&gt;Consideration of limitations, exceptions, and defenses:      Senders should be required to certify that they have considered in good faith      whether any limitations, exceptions, or defenses apply to the material in      question. This is particularly relevant for copyright and other areas of      law in which exceptions are specifically described in law. &lt;/li&gt;
&lt;li&gt;An effective appeal and counter-notice mechanism. A      notice-and-action regime should include counter-notice procedures so that      content providers can contest mistaken and abusive notices and have their      content reinstated if its removal was wrongful. &lt;/li&gt;
&lt;li&gt;Penalties for unjustified notices. Senders of erroneous      or abusive notices should face possible sanctions. In the US, senders may      face penalties for knowingly misrepresenting that content is infringing,      but the standard for “knowingly misrepresenting” is quite high and the      provision has rarely been invoked.  A better approach might be to use      a negligence standard, whereby a sender could be held liable for damages      or attorneys’ fees for making negligent misrepresentations (or for      repeatedly making negligent misrepresentations). In addition, the notice-and-action      system should allow content hosts to ignore notices from senders with an      established record of sending erroneous or abusive notices or allow them      to demand more information or assurances in notices from those who have in      the past submitted erroneous notices. (For example, hosts might be deemed      within the safe harbor if they require repeat abusers to specifically      certify that they have actually examined the alleged infringing content      before sending a notice).”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt9"&gt;&lt;sup&gt;[9]&lt;/sup&gt;&lt;/a&gt; &lt;/li&gt;
&lt;/ol&gt;
&lt;li style="text-align: justify; "&gt;All ISPs should publish their content removal policies      online and keep them current as they evolve&lt;br /&gt;The UNESCO report states, by way of background, that “[c]ontent restriction practices based on Terms of Service are opaque. How companies remove content based on Terms of Service violations is more opaque than their handling of content removals based on requests from authorized authorities. When content is removed from a platform based on company policy, [our] research found that all companies provide a generic notice of this restriction to the user, but do not provide the reason for the restriction. Furthermore, most companies do not provide notice to the public that the content has been removed. In addition, companies are inconsistently open about removal of accounts and their reasons for doing so.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt10"&gt;&lt;sup&gt;[10]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;There are legitimate reasons why an ISP may want to have policies that permit less content, and a narrower range of content, than is technically permitted under the law, such as maintaining a product that appeals to families. However, if a company is going to go beyond the minimal legal requirements in terms of content that it must restrict, the company should have clear policies that are published online and kept up-to-date to provide its users notice of what content is and is not permitted on the company’s platform. Notice to the user about the types of content that are permitted encourages her to speak freely and helps her to understand why content that she posted was taken down if it must be taken down for violating a company policy.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;When content is removed, a clear notice should be      provided in the product that explains in simple terms that content has      been removed and why&lt;br /&gt;This subsection works in conjunction with “ii,” above. If content is removed for any reason, either pursuant to a legal request or because of a violation of company policy, a user should be able to learn that content was removed if they try to access it. Requiring an on-screen message that explains that content has been removed and why is the post-takedown accompaniment to the pre-takedown published online policy of the online intermediary: both work together to show the user what types of content are and are not permitted on each online platform. Explaining to users why content has been removed in sufficient detail may also spark their curiosity as to the laws or policies that caused the content to be removed, resulting in increased civic engagement in the internet law and policy space, and a community of citizens that demands that the companies and governments it interacts with are more responsive to how it thinks content regulation should work in the online context.&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;The UNESCO report provides the following example of how Google provides notice to its users when a search result is removed, which includes a link to a page hosted by Chilling Effects:&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt11"&gt;&lt;sup&gt;[11]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“When search results are removed in response to government or copyright holder demands, a notice describing the number of results removed and the reasons for their removal is displayed to users (see screenshot below) and a copy of the request is sent to the independent non-profit organization ChillingEffects.org, which archives and publishes the request.  When possible the company also contacts the website’s owners.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt12"&gt;&lt;sup&gt;[12]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;This is an example of the message that is displayed when Google removes a search result pursuant to a copyright complaint.&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt13"&gt;&lt;sup&gt;[13]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Requirements that governments impose on intermediaries      should be as clear and unambiguous as possible&lt;br /&gt;Imposing liability on internet intermediaries without providing clear guidance as to the precise type of content that is not lawful and the precise requirements of a legally sufficient notice encourages intermediaries to over-remove content. As Article 19 noted in its 2013 report on intermediary liability:&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;“International bodies have also criticized ‘notice and takedown’ procedures as they lack a clear legal basis. For example, the 2011 OSCE report on Freedom of Expression on the internet highlighted that: Liability provisions for service providers are not always clear and complex notice and takedown provisions exist for content removal from the Internet within a number of participating States. Approximately 30 participating States have laws based on the EU E-Commerce Directive. However, the EU Directive provisions rather than aligning state level policies, created differences in interpretation during the national implementation process. These differences emerged once the national courts applied the provisions.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;These procedures have also been criticized for being unfair. Rather than obtaining a court order requiring the host to remove unlawful material (which, in principle at least, would involve an independent judicial determination that the material is indeed unlawful), hosts are required to act merely on the say-so of a private party or public body. This is problematic because hosts tend to err on the side of caution and therefore take down material that may be perfectly legitimate and lawful. For example, in his report, the UN Special Rapporteur on freedom of expression noted:&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;[W]hile a notice-and-takedown system is one way to prevent intermediaries from actively engaging in or encouraging unlawful behavior on their services, it is subject to abuse by both State and private actors. Users who are notified by the service provider that their content has been flagged as unlawful often have little recourse or few resources to challenge the takedown. Moreover, given that intermediaries may still be held financially or in some cases criminally liable if they do not remove content upon receipt of notification by users regarding unlawful content, they are inclined to err on the side of safety by overcensoring potentially illegal content. Lack of transparency in the intermediaries’ decision-making process also often obscures discriminatory practices or political pressure affecting the companies’ decisions. Furthermore, intermediaries, as private entities, are not best placed to make the determination of whether a particular content is illegal, which requires careful balancing of competing interests and consideration of defenses.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt14"&gt;&lt;sup&gt;[14]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Considering the above, if liability is to be imposed on intermediaries for certain types of unlawful content, the legal requirements that outline what is unlawful content and how to report it must be clear. Lack of clarity in this area will result in over-removal of content by rational intermediaries that want to minimize their legal exposure and compliance costs. Over-removal of content is at odds with the goals of freedom of expression.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The UNESCO Report made a similar recommendation, stating that: “Governments need to ensure that legal frameworks and company policies are in place to address issues arising out of intermediary liability. These legal frameworks and policies should be contextually adapted and be consistent with a human rights framework and a commitment to due process and fair dealing. Legal and regulatory frameworks should also be precise and grounded in a clear understanding of the technology they are meant to address, removing legal uncertainty that would provide opportunity for abuse.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt15"&gt;&lt;sup&gt;[15]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Similarly, the 2011 Joint Declaration on Freedom of Expression and the Internet states:&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“Consideration should be given to insulating fully other intermediaries, including those mentioned in the preamble, from liability for content generated by others under the same conditions as in paragraph 2(a). At a minimum, intermediaries should not be required to monitor user-generated content and should not be subject to extrajudicial content takedown rules which fail to provide sufficient protection for freedom of expression (which is the case with many of the ‘notice and takedown’ rules currently being applied).”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt16"&gt;&lt;sup&gt;[16]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;h3 style="text-align: justify; "&gt;Principle IV: Mindful Community Policy Making&lt;/h3&gt;
&lt;p style="text-align: justify; "&gt;“Laws and regulations as well as corporate policies are more likely to be compatible with freedom of expression if they are developed in consultation with all affected stakeholders – particularly those whose free expression rights are known to be at risk.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt17"&gt;&lt;sup&gt;[17]&lt;/sup&gt;&lt;/a&gt; To be effective, policies should be created through a multi-stakeholder consultation process that gives voice to the communities most at risk of being targeted for the information they share online. Further, both companies and governments should embed an ‘outreach to at-risk communities’ step into both legislative and policymaking processes to be especially sure that their voices are heard. Finally, civil society should work to ensure that all relevant stakeholders have a voice in both the creation and revision of policies that affect online intermediaries. In the context of corporate policymaking, civil society can use strategies from activist investing to encourage investors to make the human rights and freedom of expression policies of Internet companies part of the calculus that investors use to decide where to place their money. Considering the above:&lt;/p&gt;
&lt;ol style="text-align: justify; "&gt;
&lt;li style="text-align: justify; "&gt;Human rights impact assessments, considering the impact      of the proposed law or policy on various communities from the perspectives      of gender, sexuality, sexual preference, ethnicity, religion, and freedom      of expression, should be required before:&lt;/li&gt;
&lt;li&gt;New laws are written that govern content issues affecting      ISPs or conduct that occurs primarily online&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;“Protection of online freedom of expression will be strengthened if governments carry out human rights impact assessments to determine how proposed laws or regulations will affect Internet users’ freedom of expression domestically and globally.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt18"&gt;&lt;sup&gt;[18]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;/ol&gt;
&lt;li style="text-align: justify; "&gt;Intermediaries enact new policies&lt;br /&gt;“Protection of online freedom of expression will be strengthened if companies carry out human rights impact assessments to determine how their policies, practices, and business operations affect Internet users’ freedom of expression. This assessment process should be anchored in robust engagement with stakeholders whose freedom of expression rights are at greatest risk online, as well as stakeholders who harbor concerns about other human rights affected by online speech.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt19"&gt;&lt;sup&gt;[19]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Multi-stakeholder consultation processes should precede      any new legislation that will apply to content issues affecting online      intermediaries or online conduct&lt;br /&gt;“Laws and regulations as well as corporate policies are more likely to be compatible with freedom of expression if they are developed in consultation with all affected stakeholders – particularly those whose free expression rights are known to be at risk.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt20"&gt;&lt;sup&gt;[20]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Civil society and public interest groups should      encourage responsible investment in companies who implement policies that      reflect best practices for internet intermediaries&lt;br /&gt;“Over the past thirty years, responsible investors have played a powerful role in incentivizing companies to improve environmental sustainability, supply chain labor practices, and respect for human rights of communities where companies physically operate. Responsible investors can also play a powerful role in incentivizing companies to improve their policies and practices affecting freedom of expression and privacy by developing metrics and criteria for evaluating companies on these issues in the same way that they evaluate companies on other “environmental, social, and governance” criteria.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt21"&gt;&lt;sup&gt;[21]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;h3 style="text-align: justify; "&gt;Principle V: Necessity and Proportionality in Content      Restriction&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;Content should only be restricted when there is a legal      basis for doing so, or the removal is performed in accordance with a      clear, published policy of the ISP&lt;br /&gt;As CDT outlined in its 2012 intermediary liability report, “[a]ctions required of intermediaries must be narrowly tailored and proportionate, to protect the fundamental rights of Internet users. Any actions that a safe-harbor regime requires intermediaries to take must be evaluated in terms of the principle of proportionality and their impact on Internet users’ fundamental rights, including rights to freedom of expression, access to information, and protection of personal data. Laws that encourage intermediaries to take down or block certain content have the potential to impair online expression or access to information. Such laws must therefore ensure that the actions they call for are proportional to a legitimate aim, no more restrictive than is required for achievement of the aim, and effective for achieving the aim. In particular, intermediary action requirements should be narrowly drawn, targeting specific unlawful content rather than entire websites or other Internet resources that may support both lawful and unlawful uses.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt22"&gt;&lt;sup&gt;[22]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;When content must be restricted, it should be      restricted in the most minimal way possible (i.e., prefer domain removals      to IP-blocking)&lt;br /&gt;There are a number of different ways that access to content can be restricted. Examples include hard deletion of the content from all of a company’s servers, blocking the download of an app or other software program in a particular country, blocking the content on all IP addresses affiliated with a particular country (“IP-Blocking”), removing the content from a particular domain of a product (i.e., removing a link from the .fr version of a search engine that remains accessible on the .com version), blocking content from a ‘version’ of an online product that is accessible through a ‘country’ or ‘language’ setting on that product, or some combination of the last three options (i.e., an online product that directs the user to a version of the product based on the country that their IP address is coming from, but where the user can alter a URL or manipulate a drop-down menu to show her a different ‘country version’ of the product, providing access to content that may otherwise be inaccessible). &lt;/li&gt;
&lt;p style="text-align: justify; "&gt;While almost all of the different types of content restrictions described above can be circumvented by technical means such as the use of proxies, IP-cloaking, or Tor, the average internet user does not know that these techniques exist, much less how to use them. Of the different types of content restrictions described above, a domain removal, for example, is easier for an individual user to circumvent than IP-Blocked content because you only have to change the URL of the product you are using to, e.g., “.com”, to see content that has been locally restricted. To get around an IP-block, you would have to be sufficiently savvy to employ a proxy or cloak your true IP address.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Therefore, the technical means used to restrict access to controversial content has a direct impact on the magnitude of the actual restriction on speech. The more restrictive the technical removal method, the fewer people that will have access to that content. To preserve access to lawful content, online intermediaries should choose the least restrictive means of complying with removal requests, especially when the removal request is based on the law of a particular country that makes certain content unlawful that is not unlawful in other countries. Further, when building new products and services, intermediaries should build in removal capability that minimally restricts access to controversial content.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;If content is restricted due to its illegality in a      particular country, the geographical scope of the content restriction      should be as minimal as possible&lt;br /&gt;Building on the discussion in “ii,” supra, a user should be able to access content that is lawful in her country even if it is not lawful in another country. Different countries have different laws and it is often difficult for intermediaries to determine how to effectively respond to requests and reconcile the inherent conflicts that result. For example, content that denies the holocaust is illegal in certain countries, but not in others. If an intermediary receives a request to remove content based on the laws of a particular country and determines that it will comply because the content is not lawful in that country, it should not restrict access to the content such that it cannot be accessed by users in other countries where the content is lawful. To respond to a request based on the law of a particular country by blocking access to that content for users around the world, or even users of more than one country, essentially allows for extraterritorial application of the laws of the country that the request came from. While it is preferable to standardize and limit the legal requirements imposed on online intermediaries throughout the world, to the extent that this is not possible, the next-best option is to limit the application of laws that are interpreted to declare certain content unlawful to the users that live in that country. Therefore, intermediaries should choose the technical means of content restriction that is most narrowly tailored to limit the geographical scope and impact of the removal.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;The ability of conduits (telecommunications/internet      service providers) to filter content should be minimized to the extent      technically and legally possible&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;The 2011 Joint Declaration on Freedom of Expression and the Internet made the following points about the dangers of allowing filtering technology:&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;“Mandatory blocking of entire websites, IP addresses, ports, network protocols or types of uses (such as social networking) is an extreme measure – analogous to banning a newspaper or broadcaster – which can only be justified in accordance with international standards, for example where necessary to protect children against sexual abuse.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Content filtering systems which are imposed by a government or commercial service provider and which are not end-user controlled are a form of prior censorship and are not justifiable as a restriction on freedom of expression.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Products designed to facilitate end-user filtering should be required to be accompanied by clear information to end-users about how they work and their potential pitfalls in terms of over-inclusive filtering.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt23"&gt;&lt;sup&gt;[23]&lt;/sup&gt;&lt;/a&gt;&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;In short, filtering at the conduit level is a blunt instrument that should be avoided whenever possible. Similar to how conduits should not be legally responsible for content that they neither host nor modify (the ‘mere conduit’ rule discussed supra), conduits should technically restrict their ability to filter content such that it would be inefficient for government agencies to contact them to have content filtered. Mere conduits are not able to assess the context surrounding the controversial content that they are asked to remove and are therefore not the appropriate party to receive takedown requests. Further, when mere conduits have the technical ability to filter content, they open themselves to pressure from government to exercise that capability. Therefore, mere conduits should limit or not build in the capability to filter content.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Notice and notice, or notice and judicial takedown,      should be preferred to notice and takedown, which should be preferred to      unilateral removal&lt;br /&gt;Mechanisms for content removal that involve intermediaries acting without any oversight or accountability, or those which only respond to the interests of the party requesting removal, are unlikely to do a very good job at balancing public and private interests. A much better balance is likely to be struck through a mechanism where power is distributed between the parties, and/or where an independent and accountable oversight mechanism exists.&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;Considered in this way, there is a continuum of content removal mechanisms that ranges from those that are the least balanced and accountable to those that are more so.  The least accountable is the unilateral removal of content by the intermediary without legal compulsion in response to a request received, without affording the uploader of the content the right to be heard or access to remedy.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Notice and takedown mechanisms fit next along the continuum, provided that they incorporate, as the DMCA attempts to do, an effective appeal and counter-notice mechanism. However where notice and takedown falls down is that the cost and incentive structure is weighted towards removal of content in the case of doubt or dispute, resulting in more content being taken down and staying down than would be socially optimal.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;A better balance is likely to be struck by a “notice and notice” regime, which provides strong social incentives for those whose content is reported to be unlawful to remove the content, but does not legally compel them to do so. If legal compulsion is required, a court order must be separately obtained.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Canada is an example of a jurisdiction with a notice and notice regime, though limited to copyright content disputes. Although this regime is now established in legislation, it formalizes a previous voluntary regime, whereby major ISPs would forward copyright infringement notifications received from rightsholders to subscribers, but without removing any content and without releasing subscriber data to the rightsholders absent a court order. Under the new legislation additional record-keeping requirements are imposed on ISPs, but otherwise the essential features of the regime remain unchanged.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Analysis of data collected during this voluntary regime indicates that it has been effective in changing the behavior of allegedly infringing subscribers.  A 2010 study by the Entertainment Software Association of Canada (ESAC) found that 71% of notice recipients did not infringe again, whereas a similar 2011 study by Canadian ISP Rogers found 68% only received one notice, and 89% received no more than two notices, with only 1 subscriber in 800,000 receiving numerous notices.&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt24"&gt;&lt;sup&gt;[24]&lt;/sup&gt;&lt;/a&gt; However, in cases where a subscriber has a strong good faith belief that the notice they received was wrong, there is no risk to them in disregarding the erroneous notice – a feature that does not apply to notice and takedown.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Another similar way in which public and private interests can be balanced is through a notice and judicial takedown regime, whereby the rightsholder who issues a notice about offending content must have it assessed by an independent judicial (or perhaps administrative) authority before the intermediary will respond by taking the content down.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;An example of this is found in Chile, again limited to the case of copyright.&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt25"&gt;&lt;sup&gt;[25]&lt;/sup&gt;&lt;/a&gt; In response to its Free Trade Agreement with the United States, the system introduced in 2010 is broadly similar to the DMCA, with the critical difference that intermediaries are not required to take material down in order to benefit from a liability safe harbor, until such time as a court order for removal of the material is made. Responsibility for evaluating the copyright claims made is therefore shifted from intermediaries onto the courts.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Although this requirement does impose a burden on the rightsholder, this serves a purpose by disincentivizing the issue of automated or otherwise unjustified notices that are more likely to restrict or chill freedom of expression.  In cases where there is no serious dispute about the legality of the content, it is unlikely that the lawsuit would be defended. In any case, the legislation authorizes the court to issue a preliminary injunction on an ex parte basis, on condition of payment of a bond.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Intermediaries should be allowed to charge for the time      and expense associated with processing legal requests&lt;br /&gt;As an intermediary, it is time consuming and relatively expensive to understand the obligations that each country’s legal regime imposes on you, and to accurately how each legal request should be handled. Especially for intermediaries without many resources, such as forum operators or owners of home Wifi networks, the costs associated with being an intermediary can be prohibitive. Therefore, it should be within their rights to charge for their compliance costs if they are either below a certain user threshold or can show financial necessity in some way.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Legal requirements imposed on intermediaries should be      a floor, not a ceiling- ISPs can adopt more restrictive policies to more      effectively serve their users as long as they have published policies that      explain what they are doing&lt;br /&gt;The Internet has space for a wide range of platforms and applications directed to different communities, with different needs and desires. A social networking site directed at children, for example, may reasonably want to have policies that are much more restrictive than a political discussion board. Therefore, legal requirements that compel intermediaries to take down content should be seen as a ‘floor,’ but not a ‘ceiling’ on the range and quantity that of content those intermediaries may remove. Intermediaries should retain control over their own policies as long as they are transparent about what those policies are, what type of content the intermediary removes, and why they removed certain pieces of content. &lt;/li&gt;
&lt;h3 style="text-align: justify; "&gt;Principle VI: Privacy&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;It is important to protect the ability of Internet users to speak by narrowing and making less ambiguous the range of content that intermediaries can be held liable for, but it is also very important to make users feel comfortable sharing their view by ensuring that their privacy is protected. Protecting the user’s ability to share her views, especially when those views are controversial or have a direct bearing on important political issues, requires that the user can trust the intermediaries that she uses. This concept can be further broken down into three sub-principles:&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;The user’s personal information should be protected to      the greatest extent possible given the state of the art in encryption,      security, and policy&lt;br /&gt;Users will be less willing to speak on important topics if they have legitimate concerns that their data may be taken from them. As stated in the UNESCO Report, “[b]ecause of the amount of personal information held by companies and ability to access the same, a company’s practices around collection, access, disclosure, and retention are key. To a large extent a service provider’s privacy practices are influenced by applicable law and operating licenses required by the host government. These can include requirements for service providers to verify subscribers, collect and retain subscriber location data, and cooperate with law enforcement when requested. Outcome: The implications of companies trying to balance a user’s expectation for privacy with a government’s expectation for cooperation can be serious and are inadequately managed in all jurisdictions studied.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt26"&gt;&lt;sup&gt;[26]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Where possible, ISPs should help to preserve the user’s      right to speak anonymously&lt;br /&gt;An important aspect of an Internet user’s ability to exercise her right to free expression online is ability to speak anonymously. Anonymous speech is one of the great advances of the Internet as a communications medium and should be preserved to the extent possible. As noted by special rapporteur Frank LaRue, “[i]n order for individuals to exercise their right to privacy in communications, they must be able to ensure that these remain private, secure and, if they choose, anonymous. Privacy of communications infers that individuals are able to exchange information and ideas in a space that is beyond the reach of other members of society, the private sector, and ultimately the State itself. Security of communications means that individuals should be able to verify that only their intended recipients, without interference or alteration, receive their communications and that the communications they receive are equally free from intrusion. Anonymity of communications is one of the most important advances enabled by the Internet, and allows individuals to express themselves freely without fear of retribution or condemnation.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt27"&gt;&lt;sup&gt;[27]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;The user’s PII should never be sold or used without her      consent, and she should always know what is being done with it via an      easily comprehensible dashboard&lt;br /&gt;The user’s trust in the online platform that she uses and relies upon is influenced not only by the relationships the intermediary maintains with the government, but also with other commercial entities. A user, who feels that her data will be constantly shared with third parties, perhaps without her consent and/or for marketing purposes, will never feel like she is able to freely express her opinion. Therefore, it is the intermediary’s responsibility to ensure that its users know exactly what information it retains about them, who it shares that information with and under what circumstances, and how to change the way that her data is shared. All of this information should be available on a dashboard that is comprehensible to the average user, and which gives her the ability to easily modify or withdraw her consent to the way her data is being shared, or the amount of data, or specific data, that the intermediary is retaining about her.&lt;/li&gt;
&lt;h3 style="text-align: justify; "&gt;Principle VII: Access to Remedy&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;As noted in the UNESCO Report, “Remedy is the third central pillar of the UN Guiding Principles on Business and Human Rights, placing an obligation both on governments and on companies to provide individuals access to effective remedy. This area is where both governments and companies are almost consistently lacking. Across intermediary types, across jurisdictions and across the types of restriction, individuals whose content is restricted and individuals who wish to access such content are offered little or no effective recourse to appeal restriction decisions, whether in response to government orders, third party requests or in accordance with company policy. There are no private grievance or due process mechanisms that are clearly communicated and readily available to all users, or consistently applied.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt28"&gt;&lt;sup&gt;[28]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;&lt;br /&gt;Any notice and takedown system is subject to abuse, and any company policy that results in the removal of content is subject to mistaken or inaccurate takedowns, both of which are substantial problems that can only be remedied by the ability for users to let the intermediary know when the intermediary improperly removed a specific piece of content and the technical and procedural ability of the intermediary to put the content back. However, the technical ability to reinstate content that was improperly removed may conflict with data retention laws. This conflict should be explored in more detail. In general, however, every time content is removed, there should be:&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;A clear mechanism through which users can request      reinstatement of content&lt;br /&gt;When an intermediary decides to remove content, it should be immediately clear to the user that content has been removed and why it was removed (see discussion of in-product notice, supra). If the user disagrees with the content removal decision, there should be an obvious, online method for her to request reinstatement of the content.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Reinstatement of content should be technically possible&lt;br /&gt;When intermediaries (who are subject to intermediary liability) are building new products, they should build the capability to remove content into the product with a high degree of specificity so as to allow for narrowly tailored content removals when a removal is legally required. Relatedly, all online intermediaries should build the capability to reinstate content into their products while maintaining compliance with data retention laws.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Intermediaries should have policies and procedures in      place to handle reinstatement requests&lt;br /&gt;Between the front end (online mechanism to request reinstatement of content) and the backend (technical ability to reinstate content) is the necessary middle layer, which consists of the intermediary’s internal policies and processes that allow for valid reinstatement requests to be assessed and acted upon. In line with the corporate ‘responsibility to respect’ human rights, and considered along with the human rights principle of ‘access to remedy,’ intermediaries should have a system in place from the time that an online product launches to ensure that reinstatement requests can be made and will be processed quickly and appropriately.&lt;/li&gt;
&lt;h3 style="text-align: justify; "&gt;Principle VIII: Accountability&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;Governments must ensure that independent, transparent,      and impartial accountability mechanisms exist to verify the practices of      government and companies with regards to managing content created online&lt;br /&gt;“While it is important that companies make commitments to core principles on freedom of expression and privacy, make efforts to implement those principles through transparency, policy advocacy, and human rights impact assessments, it is also important that companies take these steps in a manner that is accountable to stakeholders. One way of doing this is by committing to external third party assurance to verify that their policies and practices are being implemented to a meaningful standard, with acceptable consistency wherever their service is offered. Such assurance gains further public credibility when carried out with the supervision and affirmation of multiple stakeholders including civil society groups, academics, and responsible investors. The Global Network Initiative provides one such mechanism for public accountability.  Companies not currently participating in GNI, or a process of similar rigor and multi-stakeholder involvement, should be urged by users, investors, and regulators to do so.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt29"&gt;&lt;sup&gt;[29]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Civil society should encourage comparative studies      between countries and between ISPs with regards to their content removal      practices to identify best practices&lt;br /&gt;Civil society has the unique ability to look longitudinally across this issue to determine and compare how different intermediaries and governments are responding to content removal requests. Without information about how other governments and intermediaries are handling these issues, it will be difficult for each government or intermediary to learn how to improve its laws or policies. Therefore, civil society has an important role to play in the process of creating increasingly better human rights outcomes for online platforms by performing and sharing ongoing, comparative research.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Civil society should establish best practices and      benchmarks against which ISPs and government can be measured, and should      track governments and ISPs over time in public reports&lt;br /&gt;“A number of projects that seek, define and implement indicators and benchmarks for governments or companies are either in development (examples include: UNESCO’s Indicators of Internet Development project examining country performance, Ranking Digital Rights focusing on companies) or already in operation (examples include the Web Foundation’s Web Index, Freedom House’s Internet Freedom Index, etc.). The emergence of credible, widely-used benchmarks and indicators that enable measurement of country and company performance on freedom of expression will help to inform policy, practice, stakeholder engagement processes, and advocacy.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt30"&gt;&lt;sup&gt;[30]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;h3 style="text-align: justify; "&gt;Principle IX: Due Process - In Both Legal and Private      Enforcement&lt;/h3&gt;
&lt;li style="text-align: justify; "&gt;ISPs should always consider context before removing      content and Governments and courts should always consider context before      ordering that certain content be removed&lt;br /&gt;“Governments need to ensure that legal frameworks and company policies are in place to address issues arising out of intermediary liability. These legal frameworks and policies should be contextually adapted and be consistent with a human rights framework and a commitment to due process and fair dealing. Legal and regulatory frameworks should also be precise and grounded in a clear understanding of the technology they are meant to address, removing legal uncertainty that would provide opportunity for abuse.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt31"&gt;&lt;sup&gt;[31]&lt;/sup&gt;&lt;/a&gt;&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Principles for Courts&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;An independent and impartial judiciary exists, at least in part, to preserve the citizen’s due process rights. Many have called for an increased reliance on courts to make determinations about the legality of content posted online in order to both shift the censorship function from unaccountable private actors and to ensure that courts only order the removal of content that is actually unlawful. However, when courts do not have an adequate technical understanding of how content is created and shared on the internet, the rights of the intermediaries that facilitate the posting of the content, and who should be ordered to remove unlawful content, they do not add value to the online ecosystem. Therefore, courts should keep certain principles in mind to preserve the due process rights of the users that post content and the intermediaries that host the content.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Preserve due process for intermediaries- do not order      them to do something before giving them notice and the opportunity to      appear before the court&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;In a dispute between two private parties over a specific piece of content posted online, it may appear to the court that the easy solution is to order the intermediary who hosts the content to remove it. However, this approach does not extend any due process protections to the intermediary and does not adequately reflect the intermediary's status as something other than the creator of the content. If a court feels that it is necessary for an intermediary to intervene in a legal proceeding between two private parties, the court should provide the intermediary with proper notice and give them the opportunity to appear before the court before issuing any orders.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Necessity and proportionality of judicial      determinations- judicial orders determining the illegality of specific      content should be narrowly tailored to avoid over-removal of content &lt;/li&gt;
&lt;p style="text-align: justify; "&gt;With regards to government removal requests, the UNESCO Report notes that “[o]ver-broad law and heavy liability regimes cause intermediaries to over-comply with government requests in ways that compromise users’ right to freedom of expression, or broadly restrict content in anticipation of government demands even if demands are never received and if the content could potentially be found legitimate even in a domestic court of law.”&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt32"&gt;&lt;sup&gt;[32]&lt;/sup&gt;&lt;/a&gt; Courts should follow the same principle: only order the removal of the bare minimum of content that is necessary to remedy the harm identified and nothing more.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Courts should clarify whether ISPs have to remove      content in response to court orders directed to third parties, or only      have to remove content when directly ordered to do so (first party court      orders) after an adversarial proceeding to which the ISP was a party&lt;/li&gt;
&lt;p style="text-align: justify; "&gt;See discussion of the difference between first party and third party court orders (supra, section a., “Transparency”). Ideally, any decision that courts reach on this issue would be consistent across different countries.&lt;/p&gt;
&lt;li style="text-align: justify; "&gt;Questions- related unresolved issues that should be      kicked to the larger group&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;How should the conflict between access to remedy and      data retention laws that say content must be hard deleted after a certain      period of time be resolved?  I think the access to remedy has to be      subordinated to the data protection laws. Let's make that our draft      position, but continue to flag it for discussion.&lt;/li&gt;
&lt;li style="text-align: justify; "&gt;Should ISPs have to remove      content in response to court orders directed to third parties, or only      have to remove content when directly ordered to do so (first party court      orders) after an adversarial proceeding to which the ISP was a party?       I think first party orders.  Let's make that our draft      position, but continue to flag it for discussion.&lt;/li&gt;
&lt;hr style="text-align: justify; " /&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref1"&gt;[1]&lt;/a&gt; Center for Democracy and Technology, Shielding the Messengers: Protecting Platforms for Expression and Innovation at 4-15 (Version 2, 2012), available at &lt;a href="https://www.google.com/url?q=https%3A%2F%2Fwww.cdt.org%2Ffiles%2Fpdfs%2FCDT-Intermediary-Liability-2012.pdf&amp;amp;sa=D&amp;amp;sntz=1&amp;amp;usg=AFQjCNHNG5ji0HEiYXyelfwwK8qTCgOHiw"&gt;https://www.cdt.org/files/pdfs/CDT-Intermediary-Liability-2012.pdf&lt;/a&gt; (see pp.4-15 for an explanation of these different models and the pros and cons of each).&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref2"&gt;[2]&lt;/a&gt; UNESCO, “Fostering Freedom Online: The Roles, Challenges, and Obstacles of Internet Intermediaries” at 6-7 (Draft Version, June 16th, 2014) (Hereinafter “UNESCO Report”).&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref3"&gt;[3]&lt;/a&gt; UNESCO Report at 56.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref4"&gt;[4]&lt;/a&gt; UNESCO Report at 37.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref5"&gt;[5]&lt;/a&gt; Center for Democracy and Technology, Additional Responses Regarding Notice and Action, Available at https://www.cdt.org/files/file/CDT%20N&amp;amp;A%20supplement.pdf.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref6"&gt;[6]&lt;/a&gt; The United Nations (UN) Special Rapporteur on Freedom of Opinion and Expression, the Organization for Security and Co-operation in Europe (OSCE) Representative on Freedom of the Media, the Organization of American States (OAS) Special Rapporteur on Freedom of Expression and the African Commission on Human and Peoples’ Rights (ACHPR) Special Rapporteur on Freedom of Expression and Access to Information, Article 19, Global Campaign for Free Expression, and the Centre for Law and Democracy, JOINT DECLARATION ON FREEDOM OF EXPRESSION AND THE INTERNET at 2 (2011), available at &lt;a href="http://www.google.com/url?q=http%3A%2F%2Fwww.osce.org%2Ffom%2F78309&amp;amp;sa=D&amp;amp;sntz=1&amp;amp;usg=AFQjCNF8QmlhRMreM_BT0Eyfrw_J7ZdTGg"&gt;http://www.osce.org/fom/78309&lt;/a&gt; (Hereinafter “Joint Declaration on Freedom of Expression).&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref7"&gt;[7]&lt;/a&gt; Center for Democracy and Technology, Additional Responses Regarding Notice and Action, Available at https://www.cdt.org/files/file/CDT%20N&amp;amp;A%20supplement.pdf.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref8"&gt;[8]&lt;/a&gt; Id.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref9"&gt;[9]&lt;/a&gt; Id.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref10"&gt;[10]&lt;/a&gt; UNESCO Report at 113-14.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref11"&gt;[11]&lt;/a&gt; ‘Chilling Effects’ is a website that allows recipients of ‘cease and desist’ notices to submit the notice to the site and receive information about their legal rights. For more information about ‘Chilling Effects’ see: http://www.chillingeffects.org.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref12"&gt;[12]&lt;/a&gt; Id. at 73. You can see an example of a complaint published on Chilling Effects at the following location. “DtecNet DMCA (Copyright) Complaint to Google,” Chilling Effects Clearinghouse, March 12, 2013, www.chillingeffects.org/notice.cgi?sID=841442.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref13"&gt;[13]&lt;/a&gt; UNESCO Report at 73.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref14"&gt;[14]&lt;/a&gt; Article 19, Internet Intermediaries: Dilemma of Liability (2013), available at http://www.article19.org/data/files/Intermediaries_ENGLISH.pdf.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref15"&gt;[15]&lt;/a&gt; UNESCO Report at 120.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref16"&gt;[16]&lt;/a&gt; Joint Declaration on Freedom of Expression and the Internet at 2.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref17"&gt;[17]&lt;/a&gt; Id.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref18"&gt;[18]&lt;/a&gt; Id.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref19"&gt;[19]&lt;/a&gt; Id. at 121.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref20"&gt;[20]&lt;/a&gt; Id. at 104.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref21"&gt;[21]&lt;/a&gt; Id. at 122.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref22"&gt;[22]&lt;/a&gt; Center for Democracy and Technology, Shielding the Messengers: Protecting Platforms for Expression and Innovation at 12 (Version 2, 2012), available at https://www.cdt.org/files/pdfs/CDT-Intermediary-Liability-2012.pdf.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref23"&gt;[23]&lt;/a&gt; Joint Declaration on Freedom of Expression at 2-3.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref24"&gt;[24]&lt;/a&gt; Geist, Michael, Rogers Provides New Evidence on Effectiveness of Notice-and-Notice System (2011), available at http://www.michaelgeist.ca/2011/03/effectiveness-of-notice-and-notice/&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref25"&gt;[25]&lt;/a&gt; Center for Democracy and Technology, Chile’s Notice-and-Takedown System for Copyright Protection: An Alternative Approach (2012), available at https://www.cdt.org/files/pdfs/Chile-notice-takedown.pdf&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref26"&gt;[26]&lt;/a&gt; UNESCO Report at 54.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref27"&gt;[27]&lt;/a&gt; “Report of the Special Rapporteur on the promotion and protection of the right to freedom of opinion and expression, Frank La Rue (A/HRC/23/40),” United Nations Human Rights, 17 April 2013, http://www.ohchr.org/Documents/HRBodies/HRCouncil/RegularSession/Session23/A.HRC.23.40_EN.pdf, § 24, p. 7.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref28"&gt;[28]&lt;/a&gt; UNESCO Report at 118.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref29"&gt;[29]&lt;/a&gt; UNESCO Report at 122.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref30"&gt;[30]&lt;/a&gt; Id.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref31"&gt;[31]&lt;/a&gt; UNESCO Report at 120.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;&lt;a href="https://docs.google.com/document/d/1S3pSuo49pqI7gIxP0-ogmVstk7EEnPRs2MPX7ncxrmc/pub#ftnt_ref32"&gt;[32]&lt;/a&gt; Id. at 119.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/zero-draft-of-content-removal-best-practices-white-paper'&gt;https://cis-india.org/internet-governance/blog/zero-draft-of-content-removal-best-practices-white-paper&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>jyoti</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2014-09-10T07:11:09Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/webinar-on-the-draft-intermediary-guidelines-amendment-rules">
    <title>Webinar on the draft Intermediary Guidelines Amendment Rules</title>
    <link>https://cis-india.org/internet-governance/news/webinar-on-the-draft-intermediary-guidelines-amendment-rules</link>
    <description>
        &lt;b&gt;CCAOI and the ISOC Delhi Chapter organised a webinar on January 10 to discuss the draft  "The Information Technology [Intermediary Guidelines (Amendment) Rules] 2018". Gurshabad Grover was a discussant in the panel.&lt;/b&gt;
        &lt;p&gt;The agenda of the discussion was:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;A brief introduction to the draft highlighting the key issues [Shashank Mishra]&lt;/li&gt;
&lt;li&gt;Invited experts sharing their view on the paper and questions asked [Nehaa Chaudhari, Paul Brooks, Arjun Sinha, Gurshabad Grover]&lt;/li&gt;
&lt;li&gt;Open Discussion Q&amp;amp;A&lt;/li&gt;
&lt;li&gt;Summarizing the session&lt;/li&gt;
&lt;/ul&gt;
&lt;div&gt;A recording of the session can be &lt;a class="external-link" href="https://livestream.com/internetsociety/intermediaryrules"&gt;accessed here&lt;/a&gt;&lt;/div&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/webinar-on-the-draft-intermediary-guidelines-amendment-rules'&gt;https://cis-india.org/internet-governance/news/webinar-on-the-draft-intermediary-guidelines-amendment-rules&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2019-01-18T02:13:23Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/roundtable-discussion-on-intermediary-liability">
    <title>Roundtable Discussion on Intermediary Liability</title>
    <link>https://cis-india.org/internet-governance/news/roundtable-discussion-on-intermediary-liability</link>
    <description>
        &lt;b&gt;Tanaya Rajwade participated in a roundtable discussion on intermediary liability organised by SFLC and the Dialogue in New Delhi on October 17, 2019.&lt;/b&gt;
        &lt;p&gt;Click to view the &lt;a class="external-link" href="http://cis-india.org/internet-governance/files/internet-liability"&gt;agenda&lt;/a&gt;.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/roundtable-discussion-on-intermediary-liability'&gt;https://cis-india.org/internet-governance/news/roundtable-discussion-on-intermediary-liability&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2019-10-20T07:08:11Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/a2k/news/medianama-roundtables-on-intermediary-liability-rules">
    <title>MediaNama roundtables on intermediary liability rules</title>
    <link>https://cis-india.org/a2k/news/medianama-roundtables-on-intermediary-liability-rules</link>
    <description>
        &lt;b&gt;MediaNama hosted one policy round-table on Intermediary Liability protections in Bangalore and another round-table in New Delhi, to discuss inputs sought by MEITY on the amendments to Safe Harbor for platforms (payments services, content services, ISPs, etc.) in India. Centre for Internet &amp;amp; Society is a community partner for the event.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;One round-table was held at St. Mark's Hotel in Bangalore on January 25, 2019 and the next one will be held at India Habitat Centre in New Delhi on February 7, 2019. Gurshabad Grover participated in the meeting held on January 25, 2019. Participants discussed the draft amendments to the intermediary liability rules (under Section 79 of the IT Act) and recommendations stakeholders could respond with. For more info &lt;a class="external-link" href="https://www.medianama.com/2019/01/223-announcing-nama-event-on-the-future-of-online-safe-harbor-bangalore-delhi-ad/"&gt;click here&lt;/a&gt;.&lt;/p&gt;
&lt;hr /&gt;
&lt;p&gt;MediaNama has posted some pieces after the discussion that may be of interest:&lt;/p&gt;
&lt;ul&gt;
&lt;li&gt;&lt;a class="external-link" href="https://www.medianama.com/2019/02/223-namapolicy-no-clarity-on-what-constitutes-offenses-for-intermediaries-alok-prasanna-kumar/"&gt;No clarity on what constitutes offenses for intermediaries&lt;/a&gt; (by Alok Prasanna Kumar)&lt;/li&gt;
&lt;li&gt;&lt;a class="external-link" href="https://www.medianama.com/2019/02/223-regulation-of-intermediaries-nama/"&gt;Should different sizes or categories of intermediaries be regulated differently?&lt;/a&gt; (by Nikhil Pahwa)&lt;/li&gt;
&lt;li&gt;&lt;a class="external-link" href="https://www.medianama.com/2019/02/223-safe-harbor-intermediary-liability-traceability/"&gt;The Intent of Traceability is behavioral change&lt;/a&gt; (by Nikhil Pahwa)&lt;/li&gt;
&lt;/ul&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/a2k/news/medianama-roundtables-on-intermediary-liability-rules'&gt;https://cis-india.org/a2k/news/medianama-roundtables-on-intermediary-liability-rules&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2019-02-17T15:59:33Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/response-to-the-draft-of-the-information-technology-intermediary-guidelines-amendment-rules-2018">
    <title>Response to the Draft of The Information Technology [Intermediary Guidelines (Amendment) Rules] 2018</title>
    <link>https://cis-india.org/internet-governance/blog/response-to-the-draft-of-the-information-technology-intermediary-guidelines-amendment-rules-2018</link>
    <description>
        &lt;b&gt;In this response, we aim to examine whether the draft rules meet tests of constitutionality and whether they are consistent with the parent Act. We also examine potential harms that may arise from the Rules as they are currently framed and make recommendations to the draft rules that we hope will help the Government meet its objectives while remaining situated within the constitutional ambit.&lt;/b&gt;
        
&lt;p&gt;&lt;br style="text-align: start;" /&gt;&lt;span style="text-align: start; float: none;"&gt;This document presents the Centre for Internet &amp;amp; Society (CIS) response&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; to the Ministry of Electronics and Information Technology’s invitation&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; to comment and suggest changes to the draft of The Information&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; Technology [Intermediary Guidelines (Amendment) Rules] 2018 (hereinafter&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; referred to as the “draft rules”) published on December 24, 2018. CIS is&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; grateful for the opportunity to put forth its views and comments. This response was sent on January 31, 2019.&lt;/span&gt;&lt;br style="text-align: start;" /&gt;&lt;br style="text-align: start;" /&gt;&lt;span style="text-align: start; float: none;"&gt;In this response, we aim to examine whether the draft rules meet tests&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; of constitutionality and whether they are consistent with the parent&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; Act. We also examine potential harms that may arise from the Rules as&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; they are currently framed and make recommendations to the draft rules&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; that we hope will help the Government meet its objectives while&lt;/span&gt;&lt;span style="text-align: start; float: none;"&gt; remaining situated within the constitutional ambit.&lt;/span&gt;&lt;/p&gt;
&lt;p&gt;&lt;span style="text-align: start; float: none;"&gt;The response can be accessed &lt;a href="https://cis-india.org/internet-governance/resources/Intermediary%20Liability%20Rules%202018.pdf"&gt;here&lt;/a&gt;.&lt;br /&gt;&lt;/span&gt;&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/response-to-the-draft-of-the-information-technology-intermediary-guidelines-amendment-rules-2018'&gt;https://cis-india.org/internet-governance/blog/response-to-the-draft-of-the-information-technology-intermediary-guidelines-amendment-rules-2018&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Gurshabad Grover, Elonnai Hickok, Arindrajit Basu, Akriti</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2019-02-07T08:06:41Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/news/2019-international-asia-conference">
    <title>2019 International Asia Conference</title>
    <link>https://cis-india.org/internet-governance/news/2019-international-asia-conference</link>
    <description>
        &lt;b&gt;ITECHLAW organized the 2019 edition of International Asia Conference at JW Marriott hotel in Bangalore on January 31, 2019 and February 1, 2019. Sunil Abraham was a panelist in the session "Policy Making for the Emerging Tech in India".&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;The rush of emerging technologies of Machine Learning, Internet of Things (IoT) and Virtual Reality (VR) is revolutionising the landscape in which humans exist. Innovators of the generation are ambitious, and their contributions have significantly impacted on various fields like healthcare, media and entertainment, agriculture, and other service models. As these technology advancements are driving new business and service models, there is a need for stakeholders and governments to ensure security and stability of the market without stifling innovations, stigmatising incentives or creating obstacles. Rapid spreading technology applications are resulting in drastic changes in today’s regulatory model, posing the difficult challenges for regulators. In India, the expeditiously developing start-up ecosystem and online consumer base, has stirred the regulators.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Intermediary liability, surveillance, data and privacy, digital taxation, data governance and sovereignty are the dominating debatable topics in India. The debates are not only between regulators and stakeholders, but consumers also joining in it. As the competition between Indian and Foreign Technology intensifies in the turf, the debate on tech-policy is considerably being mentioned in run-up of political parties to the general elections as well. Over the past one year, the country has witnessed some landmark judgments and contentious government proposals related to data and privacy, implications of which have affected over-the-top (“OTT”) services, online media, social media, e-commerce platforms, IoT services etc. The Indian regulatory framework on tech-policy is becoming stricter due to a very disruptive phase last year. The tech-giants like Facebook, Google, Twitter, and Amazon are themselves realising their enormous market influence. After the episodes of lynching, hate speeches etc., they are participating in policy-making efforts related to fake news and digital malfeasance. In this process legal industry is making considerable lobbying efforts for corporations to work with government to curb the menace of digital malpractice and make the internet safer.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;As the legal industry is participating in the process of creating an innovators-friendly regulatory regime, they are also striving to understand the disruptive technologies and adopt them for their own convenience. However, legal firms must understand that the technology cannot do their job for clients but can only upgrade the business model for them. The traditional law firm business model is not in sync with legal buyers. Effective deployment of technology will ameliorate the factor of its approachability to its clients.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;With the growing technology-based start-ups in India, it is going to be a hub for investments by big corporations. In order to keep attracting the investors there is a need for government to remove the potential hindrances that may make investors double-think. The government should prepare a level-playing field in the market by making citizens aware of the standard tech-policies and fostering the innovators-friendly regulatory regime.&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;For more info &lt;a class="external-link" href="https://www.itechlaw.org/Bangalore2019"&gt;see the website&lt;/a&gt;&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/news/2019-international-asia-conference'&gt;https://cis-india.org/internet-governance/news/2019-international-asia-conference&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Admin</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2019-02-19T00:23:43Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/a-look-at-two-problematic-provisions-of-the-draft-anti-trafficking-bill">
    <title>A look at two problematic provisions of the draft Anti-trafficking bill</title>
    <link>https://cis-india.org/internet-governance/blog/a-look-at-two-problematic-provisions-of-the-draft-anti-trafficking-bill</link>
    <description>
        &lt;b&gt;This post examines two badly drafted provisions of the new Anti-Trafficking bill that have the potential to severely impinge upon the Freedom of Expression, including through a misunderstanding of intermediary liability. &lt;/b&gt;
        
&lt;p style="text-align: justify;" class="normal"&gt;On 28 Feb 2018, the Union Cabinet approved                   ‘The Trafficking of Persons (Prevention, Protection                   and Rehabilitation) Bill, 2018’ (‘the bill’) for                   introduction to the Parliament. This comes after a                   series of consultations on an earlier 2016 draft bill,                   that had faced its fair share of &lt;a href="https://scroll.in/article/813268/six-counts-on-which-the-draft-anti-trafficking-bill-fails-short" target="_blank"&gt;criticism&lt;/a&gt;. As per the Press Information Bureau &lt;a href="http://pib.nic.in/newsite/PrintRelease.aspx?relid=176878" target="_blank"&gt;announcement&lt;/a&gt;, the Ministry of Women and Child                   Development met with various stakeholders including 60                   NGOs and have incorporated many of the suggestions put                   forth. They’ve also stated that ‘the new law will make                   India a leader among South Asian countries to combat                   trafficking.’&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;However, at first glance, there appear to be                   several issues with overbroad or vague language used                   in the drafting of the bill, that stretch it into                   potentially problematic areas. This current post will                   focus on two such provisions that could lead to a                   deleterious effect on the Freedom of Expression. As                   the bill is currently not publicly available, a                   stakeholder’s copy of the draft is being used to                   source these provisions. The relevant sections have                   been reproduced below for convenience. (Emphasis in                   bold is as provided by the author).&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;Section                     39: Buying or Selling of any person&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;39. (l) Whoever buys or sells any person                     for a consideration, shall be punished with rigorous                     imprisonment for a term which shall not be less than                     seven years but may extend to ten years, and shall                     also be liable to fine which shall not be less than                     one lakh rupees.&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;(2) Whoever solicits or publicises                     electronically, taking or distributing obscene                     photographs or videos or providing materials or                     soliciting or guiding tourists or using agents or                     any other form &lt;strong&gt;which may lead                       to the trafficking of a person shall be punished&lt;/strong&gt; with rigorous imprisonment for a term which shall                     not be less than five years but may extend to ten                     years, and shall also be liable to fine which shall                     not be less than fifty thousand rupees but which may                     extend to one lakh rupees.&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The grammatical acrobatics of section 39(2)                   aside, this anti-solicitation provision is severely                   problematic in that it mandates punishment even for a                   vaguely defined action or actions that may not                   actually be connected to the trafficking of a person.                   In other words, the provision doesn’t require any of                   the actions to be connected to trafficking in their                   intent or even outcome, but only in &lt;em&gt;potential&lt;/em&gt; &lt;em&gt;connection&lt;/em&gt; to the outcome. At the same time, it says these                   ‘shall’ be punished!&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;This vagary that ignores actual or even                   probabilistic causation flies in the face of standard                   criminal law which requires &lt;em&gt;mens rea&lt;/em&gt; along with &lt;em&gt;actus                     reus&lt;/em&gt;. The excessively wide scope of this badly                   drafted provision leaves it prone to abuse. For                   example, currently the provision allows the following                   interpretation to be included: ‘Whoever publicizes                   electronically, by providing materials in any form,                   which may lead to trafficking of a person shall be                   punished…’. Even the electronic publicizing of an                   academic study on trafficking could fall under the                   provision as it currently reads, if it is argued that                   publishing studies that show the prevalence of                   trafficking ‘may lead to the trafficking of a person’!                   It is not hard to imagine that an academic study that                   shows trafficking numbers at embarrassingly high rates                   could be threatened with this provision. Similarly,                   any of our vast number of self-appointed moral                   guardians could also pull within this provision any                   artistic work that they may personally find offensive                   or ‘obscene’. Simply put, without any burden of                   showing a causal connect, it could be argued that &lt;em&gt;anything&lt;/em&gt; ‘may                   lead’ to the trafficking of a person. Needless to say,                   this paves the way for a severe chilling effect on                   free speech, especially on critical speech around                   trafficking issues.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;Section 41: Offences related to media&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;41. (l) Whoever commits trafficking of a                     person with the aid of media, including, but not                     limited to print, internet, digital or electronic                     media, shall be punished with rigorous imprisonment                     for a term which shall not be less than seven years                     but may extend to ten years and shall also be liable                     to fine which shall not be less than one lakh                     rupees.&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;&lt;em&gt;(2) Whoever &lt;strong&gt;distributes,                       or sells or stores&lt;/strong&gt;, in any form in any                     electronic or printed form showing incidence of                     sexual exploitation, sexual assault, or rape for the                     purpose of exploitation or for coercion of the                     victim or his family members, or for unlawful gain &lt;strong&gt;shall be                       punished&lt;/strong&gt; with rigorous imprisonment for a term                     which shall not be less than three years but may                     extend to seven years and shall also be liable to                     fine which shall not be less than one lakh rupees.&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The drafters of this bill have perhaps                   overlooked the fact that unlike the physical world,                   the infrastructure of the electronic / digital world                   requires 3rd party intermediaries to handle                   information during most forms of electronic                   activities, whether it is transmission, storage or                   display. As it is not feasible, desirable or even                   practically possible for intermediaries to verify the                   legality of every bit of data that gets transferred or                   stored by the intermediary, ‘safe harbours’ are                   provided in law for intermediaries, protecting them                   from liability of the information being transmitted                   through them. These ensure that entities that act as                   architectural requirements and intermediary platforms                   are able to operate smoothly and without fear. If                   intermediaries are not granted this protection, it                   puts them in the unenviable position of having to                   monitor un-monitorable amounts of data, and face legal                   action for the slip-ups that are bound to happen                   regularly. Furthermore, there are several levels of                   free speech and privacy issues associated with having                   multiple gatekeepers on the expression of speech                   online. 
A charitable reading of the intent of a                   provision which does not recognise safe harbours for                   3rd party intermediaries, would be that the drafters                   of the bill have simply not realised that users who                   upload and initiate transfer of information online,                   are not the same parties who do the actual                   transmission of the information.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Distribution, selling or storing of                   information online would require the transmission of                   information over intermediaries, as well as the                   temporary storage of such information on intermediary                   platforms. In India, intermediaries engaging with                   transmission or temporary storage of information are                   provided safe harbour&lt;a href="imap://prasad@mail.cis-india.org:143/fetch%3EUID%3E/INBOX%3E176833#_ftn1"&gt;[1]&lt;/a&gt; by Section 79 of the Information Technology Act, 2000                   (‘IT Act’), so long as they:&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;(i) act as a mere ‘conduit’ and do not                   initiate the transmission, select the receiver of the                   transmission, or select or modify the information                   contained in the transmission.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;(ii) exercise due diligence while                   discharging duties under this Act, and observes other                   guidelines that the Central Government may prescribe.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;The Information Technology (Intermediary                   Guidelines) Rules, 2011, list out the nature of the                   due diligence to be followed by intermediaries to                   claim exemption under Section 79 of the IT Act.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Intermediaries will not be granted safe                   harbour if they have conspired, abetted, aided or                   induced commission of the unlawful act, or if they do                   not remove or disable access to information upon                   receiving actual knowledge, or notice from the                   Government, of the information that is transmitted or                   stored by the intermediary being used for unlawful                   purposes.&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;Thus it can be seen that the IT Act already                   provides an in-depth regime for intermediary                   liability, and given its &lt;em&gt;non-obstante &lt;/em&gt;clause                   which states that Section 79 of the IT Act would apply                   “Notwithstanding anything contained in any law for the                   time being in force” ,&amp;nbsp;                   as well as the reiteration of the IT Act’s                   overriding effect via Section 81, which states that                   the provisions of the Act ‘shall have effect                   notwithstanding anything inconsistent therewith                   contained in any other law for the time being in                   force’ (barring the exercise of copyright or patent                   rights), it is generally considered the appropriate                   legal framework for this issue. However, it appears                   that the drafters of the 2018 Anti-trafficking bill                   have not considered this aspect at all, since they                   have not referenced the IT Act in this context in the                   bill, and have additionally added their own &lt;em&gt;non-obstante &lt;/em&gt;clause                   in Section 59 of the bill:&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;59.&lt;em&gt; The provisions                     of this Act, shall be in addition to and not in                     derogation of the provisions of any other law for                     the time being in force and, in case of any                     inconsistency, the provisions of this Act shall have                     overriding effect on the provisions of any such law                     to the extent of the inconsistency.&lt;/em&gt;&lt;/p&gt;
&lt;p style="text-align: justify;" class="normal"&gt;So the regime as prescribed by the IT Act                   allows for safe harbours, whereas the regime as                   prescribed by the Anti-Trafficking bill does not allow                   for safe harbours, and both say that they would have an                   overriding effect for any conflicting law. This                   legislative bumble could potentially be solved by                   using the settled principle that a special Act                   prevails over a general legislation. This is still a                   little tricky as they are technically both special                   Acts. It could be argued that given the context of the                   Anti-trafficking bill as focusing on trafficking, and                   the context of the IT Act focusing on the interface of                   law and technology, that for the purposes of Section                   41(2) of the Anti-trafficking bill, the IT Act is the                   special legislation. And thus Section 79 of the IT Act                   should make redundant the relevant portion of Section                   41(2) of the Anti-trafficking bill. This reading would                   require the bill to be modified so as to remove the                   redundancy and the conflicting portion of Section                   41(2).&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify;"&gt;[1] In 2016, a division bench of the Delhi High Court held in the case of Myspace Inc vs Super Cassettes Industries Ltd that a safe harbour immunity for intermediaries was necessary as it was not technically feasible to pre-screen content from third parties, and that tasking intermediaries with this responsibility could have a chilling effect on free speech, It held that their responsibility was limited to the extent of acting upon receiving ‘actual knowledge’. Earlier, in determining what ‘actual knowledge’ refers to, in 2015 the Supreme Court of India in the landmark case of Shreya Singhal vs Union of India, required this to be in the form of a notice via a court or government order. Thus under our current law, intermediaries are granted a safe harbour from liability so long as they act upon court or government orders which notify them of content that is required to be taken down.&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;&amp;nbsp;&lt;/p&gt;
&lt;p style="text-align: justify;"&gt;Clarification (18th August, 2018): A letter sent to the Ministry of Women and Child Development mentioned the Centre for Internet &amp;amp; Society as institutionally endorsing a critique of The Trafficking of Persons (Prevention, Protection and Rehabilitation) Bill, 2018. We seek to clarify that the Centre for Internet &amp;amp; Society did not endorse the letter to the Ministry.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/a-look-at-two-problematic-provisions-of-the-draft-anti-trafficking-bill'&gt;https://cis-india.org/internet-governance/blog/a-look-at-two-problematic-provisions-of-the-draft-anti-trafficking-bill&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>swaraj</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2018-08-18T09:21:55Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/news/india-the-new-front-line-in-the-global-struggle-for-internet-freedom">
    <title>India: The New Front Line in the Global Struggle for Internet Freedom </title>
    <link>https://cis-india.org/news/india-the-new-front-line-in-the-global-struggle-for-internet-freedom</link>
    <description>
        &lt;b&gt;The government tussles with Internet freedom activists in the world's largest democracy.&lt;/b&gt;
        
&lt;p&gt;&lt;a class="external-link" href="http://www.theatlantic.com/international/archive/2012/06/india-the-new-front-line-in-the-global-struggle-for-internet-freedom/258237/"&gt;This article was published in the Atlantic on June 7, 2012&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;This Saturday, Indian Internet freedom advocates are planning to stage a nation-wide protest against what they see as their government's increasingly restrictive regulation of the Internet. An amorphous alliance of concerned citizens and activist hackers intend to use the streets and the Internet itself to make their opposition felt.&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Over the last year, as Americans were focused on the domestic debates surrounding the &lt;a class="external-link" href="http://www.forbes.com/fdc/welcome_mjx.shtml"&gt;Stop Online Piracy Act (SOPA) and the Protect Intellectual Property Act (PIPA)&lt;/a&gt;, or on the more brazen displays of online censorship by mainstays of Internet restriction like China, Iran and Pakistan, India was rapidly emerging as a key battleground in the worldwide struggle for Internet freedom.&lt;/p&gt;
&lt;p&gt;The confrontation escalated in April 2011, when the Ministry of Communications and Information Technology introduced sweeping new rules regulating the nature of material that Internet companies could host online. In response, civil liberties groups, Internet freedom supporters, and a growing assembly of online activist hackers have been fighting back, initiating street protests, organizing online petitions, and launching -- under the banner of the "Anonymous" hacker group -- a torrent of distributed denial of service (DDoS) attacks against Indian government and industry web sites.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;The &lt;a class="external-link" href="http://www.mit.gov.in/sites/upload_files/dit/files/GSR314E_10511%281%29.pdf"&gt;April 2011 rules&lt;/a&gt;, an update to India's &lt;a class="external-link" href="http://www.mit.gov.in/sites/upload_files/dit/files/downloads/itact2000/it_amendment_act2008.pdf"&gt;Information Technology Act&lt;/a&gt; (IT Act) of 2000 (amended in 2008), popularly known as the "intermediary guidelines," instruct online "intermediaries" -- companies that provide Internet access, host online content, websites, or search services -- to remove, within 36 hours, any material deemed to be "grossly harmful, harassing, blasphemous," "ethnically objectionable," or "disparaging" by any Internet user who submits a formal objection letter to that intermediary. Under the guidelines, any resident of India can compel Google, at the risk of criminal and/or civil liability, to remove content from its site that the resident finds politically, religiously, or otherwise "objectionable."&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Information Technology Minister Kapil Sibal -- the intermediary guidelines' most important government evangelist, and the head of the agency responsible for administering the guidelines -- even &lt;a class="external-link" href="http://india.blogs.nytimes.com/2011/12/05/india-asks-google-facebook-others-to-screen-user-content/"&gt;instructed Internet companies&lt;/a&gt; to go one step further and start pre-screening content for removal before it was flagged by concerned users.&amp;nbsp; This requires companies like Facebook, in effect, to determine what material might offend its users and thus violate Indian law, and then remove it from the website. With &lt;a class="external-link" href="http://articles.economictimes.indiatimes.com/2011-12-15/news/30520358_1_e-commerce-indian-internet-space-internet-and-mobile-association"&gt;over 100 million Internet users&lt;/a&gt; in India, no company could possibly monitor all its content through human intervention alone; web companies would have to set up filters and other mechanisms to take down potentially objectionable content more or less automatically.&lt;/p&gt;
&lt;p&gt;India's constitution, in large part crafted in response to the modern country's harrowing history of religious and communal violence, allows for "reasonable restrictions" on free speech. Indian officials have at times banned certain books, movies, or other materials touching on such sensitive subjects as religion and caste.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Left with little choice but to comply or risk legal action, Google, Yahoo!, and other Internet companies acquiesced and &lt;a class="external-link" href="http://www.reuters.com/article/2012/02/06/india-internet-idUSL4E8D66SM20120206"&gt;began pulling down &lt;/a&gt;webpages after receiving requests to do so. Yet many companies refused to remove all the content requested, prompting Mufti Aijaz Arshad Qasm, an Islamic scholar, and journalist Vinay Rai, respectively, to file civil and criminal suits against 22 of the largest Internet companies operating in India. The targets, including Google, Yahoo!, Facebook, and Microsoft, were accused of failing to remove material deemed to be offensive to the Prophet Mohammed, Jesus, several Hindu gods and goddesses, and various political leaders.&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;The companies have had some success in the litigation: Google India, Yahoo!, and Microsoft have all &lt;a class="external-link" href="http://online.wsj.com/article/SB10001424052702304356604577341101544076864.html"&gt;been dropped&lt;/a&gt; from the civil case after the court heard preliminary arguments; the Delhi High Court recently dismissed Microsoft from the criminal case.&amp;nbsp; Otherwise, both cases are still ongoing.&lt;/p&gt;
&lt;p&gt;India has taken its Internet regulation internationally, &lt;a class="external-link" href="http://www.thinkdigit.com/Internet/India-asks-US-to-remove-objectionable-content_9366.html"&gt;asking&lt;/a&gt; the United States government to ensure that India-specific objectionable content is removed from sites such as Facebook, Google, and YouTube, and suggesting that these companies should be asked to relocate their servers to India in order to better regulate the content locally.&lt;/p&gt;
&lt;p&gt;The Indian government's state-centric view of Internet regulation and governance is also clear in their approach to international governance. Citing the need for more governmental input in the Internet's development and what happens online, India formally &lt;a class="external-link" href="http://content.ibnlive.in.com/article/21-May-2012documents/full-text-indias-un-proposal-to-control-the-internet-259971-53.html"&gt;proposed the creation&lt;/a&gt; of the Committee for Internet Related Policies (CIRP) at the 2011 United Nations General Assembly. The CIRP would be an entirely new multilateral UN body responsible for coordinating virtually all Internet governance functions, including multilateral treaties.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;To be fair, some Indians see these as efforts not to impose censorship but to allow a greater degree of Indian and international control over a system considered by many in India and elsewhere to be &lt;a class="external-link" href="http://www.thehindu.com/opinion/op-ed/article3426292.ece"&gt;under the thumb of the U.S. government&lt;/a&gt;.&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Yet some Internet experts in both India and the West are criticizing the CIRP proposal as part of "&lt;a class="external-link" href="http://articles.timesofindia.indiatimes.com/2012-05-21/internet/31800574_1_governance-cyber-security-internet"&gt;thinly masked efforts to control or shape the Internet&lt;/a&gt;," as one Indian official put it. They&lt;a class="external-link" href="http://www.huffingtonpost.com/joe-waz/internet-governance-at-a-_b_1203125.html"&gt; warn&lt;/a&gt; that a state-centric system of Internet governance could lead to serious restrictions on the type of information available online, and damage the Internet's potential for innovation.&lt;/p&gt;
&lt;p&gt;&lt;img src="https://cis-india.org/home-images/IndiaAnonymous.jpg/image_preview" alt="India Anonymous" class="image-inline image-inline" title="India Anonymous" /&gt;&lt;/p&gt;
&lt;p&gt;India's Internet freedom advocates are straining to keep up with the rapid pace of the last year. But, now, they're gathering some steam. Online petitions against the intermediary guidelines, the IT Act, and censorship in India in general have appeared on &lt;a class="external-link" href="https://www.change.org/petitions/mps-of-india-support-the-annulment-motion-to-protect-internet-freedom-stopitrules"&gt;Change.org&lt;/a&gt; and &lt;a class="external-link" href="https://www.facebook.com/saveyourvoice"&gt;Facebook&lt;/a&gt;; &lt;a class="external-link" href="http://www.youtube.com/watch?v=HtA194jig3s"&gt;protest videos&lt;/a&gt; are popping up on Youtube. The Centre for Internet and Society, a web-focused think tank, released an &lt;a href="https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet" class="external-link"&gt;extensive report highlighting&lt;/a&gt; the intermediary guidelines' effects on freedom online. The Internet Democracy Project &lt;a class="external-link" href="http://lighthouseinsights.in/bloggers-against-internet-censorship.html"&gt;organized a day-long training program&lt;/a&gt; on freedom of expression and censorship for bloggers entitled "Make Blog not War." FreeSoftware Movement Karnataka organized a protest of hundreds of students in Bangalore, India's IT hub. And Save Your Voice activists &lt;a class="external-link" href="http://kafila.org/2012/04/22/freedom-in-the-cage-photos-from-a-protest-against-internet-censorship-in-delhi/"&gt;held a sit in&lt;/a&gt; outside Delhi's Jantar Mantar monument to pressure lawmakers.&lt;/p&gt;
&lt;p&gt;Yet, not all the opposition has been so civil. Hackers, operating under the umbrella of the techno-libertarian hacker community, "Anonymous," are waging their own, less lawful fight against the government as well as the Internet companies that have, in their view, too readily complied with the government's censorship demands.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;On May 17, Anonymous hackers attacked a number of Indian &lt;a class="external-link" href="http://tech2.in.com/news/web-services/supreme-court-website-hacked-in-response-to-tpb-vimeo-block/307532"&gt;government websites&lt;/a&gt;, including the Indian Supreme Court, the Reserve Bank of India, the ruling Congress Party and its &lt;a class="external-link" href="http://windowsera.com/anonymous-india-hacks-aitmc-mizoram-government-website-redirects-to-twitter"&gt;coalition partners&lt;/a&gt;, as well as the opposition Bhartiya Janata Party (BJP), making them all inaccessible for several hours.&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Moreover, just this past week, Anonymous broke into the websites and servers of a number of Internet Service Providers, including &lt;a class="external-link" href="http://www.firstpost.com/tech/anonymous-strikes-rcom-to-protest-india-net-censorship-322241.html"&gt;Reliance Communications&lt;/a&gt;, seemingly to punish them for complying with government orders to block file-sharing hosts such as Pirate Bay and Vimeo. Once in the ISPs' servers, the hackers accessed their lists of &lt;a class="external-link" href="http://tech2.in.com/news/general/anonymous-india-releases-blocked-sites-list-plans-peaceful-protest/310682"&gt;blocked sites&lt;/a&gt; -- which they then distributed to media outlets. They also redirected people who tried to reach Reliance's site to an Anonymous &lt;a class="external-link" href="http://www.cio.in/sites/default/files/topstory/2012/05/reliance_network_hacked.JPG"&gt;protest page&lt;/a&gt;.&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;Building on the momentum of these attacks, and on the anti-censorship outrage growing across India, Anonymous &lt;a class="external-link" href="http://articles.timesofindia.indiatimes.com/2012-05-31/internet/31920036_1_occupy-protests-government-sites-website"&gt;has called for a national day of protest&lt;/a&gt; in 11 Indian cities this Saturday, and an additional series of online attacks against government and industry websites. The occupy-style protests -- which Anonymous insists will be non-violent -- are to include awareness campaigns on Facebook and other social networking sites. Protesters are being asked to don the &lt;a class="external-link" href="http://en.wikipedia.org/wiki/File:Anonymous_at_Scientology_in_Los_Angeles.jpg"&gt;Guy Fawkes mask&lt;/a&gt;, a symbol now associated with Anonymous, among other protest movements, both in the streets and on their Facebook profiles.&amp;nbsp;&lt;/p&gt;
&lt;p&gt;It's unclear how much support the June 9 protest will receive, or how serious the planned Anonymous attacks will be, but given the attention that the announcement has attracted in the Indian media, it seems likely that people will at least be paying attention. And even if this weekend the protest fails to attract the type of large and vocal response protest organizers are hoping it will, that it's come so far is an indication that neither side looks ready to back down.&lt;/p&gt;
&lt;p&gt;Still, the government has given some small signs recently that it is reconsidering its position on the "intermediary guidelines," if not on Internet regulation more generally. Information Technology Minister Sibal, under pressure from the political opposition and after Parliament Member P. Rajeeve tabled a motion to seek rescission of the new rules,&lt;a class="external-link" href="http://indiatoday.intoday.in/story/kapil-sibal-promises-to-rethink-on-internet-censorship/1/189265.html"&gt; indicated&lt;/a&gt; that he would reconsider his previous positions, and the government has agreed to &lt;a class="external-link" href="http://articles.economictimes.indiatimes.com/2012-05-18/news/31765682_1_internet-rules-arun-jaitley-information-technology-rules"&gt;reexamine the rules&lt;/a&gt;.&amp;nbsp;&amp;nbsp;&lt;/p&gt;
&lt;p&gt;This is an encouraging sign, although it's unlikely that any government action will come in time to forestall this weekend's protests. But even if the intermediary guidelines are ultimately rescinded, India will likely continue its soul-searching on how it deals with the Internet.&lt;/p&gt;
&lt;p&gt;As the world's largest democracy and a model for much of the developing world, and with an Internet population anticipated to surpass that of the United States in the next few years, India is an important, maybe the most important, test case for the future of Internet freedom globally. Should India continue down a course of restriction, other nations eager to restrict online speech could see precedent to impose their own technical and political barriers to free expression online. It would be a tragic irony if India, as one of the developing world's greatest beneficiaries of the information revolution, ended up curbing those same free flows of information and ideas.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/news/india-the-new-front-line-in-the-global-struggle-for-internet-freedom'&gt;https://cis-india.org/news/india-the-new-front-line-in-the-global-struggle-for-internet-freedom&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    
    
        <dc:subject>Censorship</dc:subject>
    

   <dc:date>2012-06-18T07:10:21Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/news/beauty-blog-creates-furore">
    <title>A beauty’s blog creates furore</title>
    <link>https://cis-india.org/news/beauty-blog-creates-furore</link>
    <description>
        &lt;b&gt;Her first Tamil poetry anthology Otraiyilaiyena (As a single leaf) saw three editions and the second one Ulagin Azhagiya Muthal Penn (The first beautiful woman in the world) invited mixed reactions like Iyal Poetry Award and a call for a ban by Hindu Makkal Katchi. Parathaiyarul Raani (Queen of sluts) her third collection was a reaction to all the moral policing. &lt;/b&gt;
        
&lt;p&gt;&lt;a class="external-link" href="http://www.deccanchronicle.com/tabloid/chennai/beauty%E2%80%99s-blog-creates-furore-333"&gt;Lakshmi Krupa's article was published in Deccan Chronicle on April 10, 2012&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;While her film Sengadal The Dead Sea was stopped from being screened to the public, until the Supreme Court’s Appellate tribunal intervened with regional censor board for the film clearance, groups like the Makkal Kalai Ilakiya Kazhagam attacked her beliefs. Adding to this list is the latest revelation that the Principal Secretary of IT Department of the Tamil Nadu government requested that her blog be blocked along with a host of others.&lt;/p&gt;
&lt;p&gt;In a text sent from A.K. Kaushik, Additional Director &amp;amp; CPIO Cyber Laws &amp;amp; E-Security in response to an RTI petition on Website Blocking, it was reported that Leena’s blog http://ulaginazhagiyamuthalpenn.blogspot.com was requested to be blocked on 21.07.2010 by the Principal Secretary, IT Department.&lt;/p&gt;
&lt;p&gt;This recent revelation has led to an outrage over the fact that artists and activists like Leena have had to constantly knock on the doors of the legal system to exercise the most basic of their rights. In an interview from London where she is currently the Charles Wallace Visiting Scholar at the University of London, Leena says, “Center for Internet and Society in Bengaluru that works towards upholding Civil Liberties Online, had obtained a list of all websites that were sought to be blocked by Governmental authorities with the use of Right to Information Act.&lt;/p&gt;
&lt;p&gt;They sent me all the details on how my blog was one amongst them as the Principal Secretary, IT Department, Govt of TN had asked for it to be blocked. As the Internet’s role in free speech becomes increasingly prevalent, tactics to control the Internet are growing more refined each year. Methods of accessing private data and censoring content vary between countries, but all maintain an element of oppression. We, who are concerned about civil liberties should wake up to the secret missions of our government on Internet Censorship and protect freedom of speech online.”&lt;/p&gt;
&lt;p&gt;Leena’s blog has been in the center of controversies before too. “Hindu Makkal Katchi, the right wing moral police lodged a police complaint to ban my poetry collections and ban my blog ulaginazhagiyamuthalpenn. blogspot.com. They went to every possible media house and were making threat calls and there were discussions on the alleged obscenity in my poems. They even wanted the Iyal International Poetry Prize and Sirpi Literary Awards to be revoked.”&lt;br /&gt;&lt;br /&gt;Leena’s poetry challenges fanatic minds. “My poetry has a feminist agenda and it is just not about equal rights for women. It is a socialist, anti-institutional political movement which calls for women to break the code, destroy capitalism, live their sexuality and witch hunt every possible patriarchal design. I am not amused about the fact that my poetry gave jitters to ultra blasphemous right and left wingers,” she concludes.&lt;/p&gt;

        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/news/beauty-blog-creates-furore'&gt;https://cis-india.org/news/beauty-blog-creates-furore&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    
    
        <dc:subject>Censorship</dc:subject>
    

   <dc:date>2012-04-11T03:50:47Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/it-amendment-act-69-rules-draft-and-final-version-comparison">
    <title>IT (Amendment) Act, 2008, 69 Rules: Draft and Final Version Comparison</title>
    <link>https://cis-india.org/internet-governance/blog/it-amendment-act-69-rules-draft-and-final-version-comparison</link>
    <description>
        &lt;b&gt;Jadine Lannon has performed a clause-by-clause comparison of the Draft 69 Rules and official 69 Rules under Section 69B in order to better understand how the two are similar and how they differ. Very brief notes have been included on some changes we deemed to be important.
&lt;/b&gt;
        &lt;table class="plain"&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;th&gt;&lt;img src="https://cis-india.org/home-images/copy_of_pc1.png" alt="c1" class="image-inline" title="c1" /&gt;&lt;/th&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc2.png" alt="c2" class="image-inline" title="c2" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc3.png" alt="c3" class="image-inline" title="c3" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc4.png" alt="c4" class="image-inline" title="c4" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc5.png" alt="c5" class="image-inline" title="c5" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/copy_of_pc6.png" alt="c6" class="image-inline" title="c6" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc7.png" alt="c7" class="image-inline" title="c7" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc8.png" alt="c8" class="image-inline" title="c8" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;tr&gt;
&lt;td&gt;&lt;img src="https://cis-india.org/home-images/pc9.png" alt="c9" class="image-inline" title="c9" /&gt;&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;p style="text-align: justify; "&gt;Similar to the other comparisons that I have done on the 69A and 69B Draft and official Rules, the majority of the changes between these two sets of rules serves to restructure and clarify various clauses in the Draft 69 Rules.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Three new definitions appear in the Clause (2) of the 69 Rules, including a definition for “communication”, which appears in the Draft Rules but has no associated definition under Clause (2) of the Draft Rules.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Clause (31) of the Draft Rules, which deals with the requirement of security agencies of the State and Union territories to share any information gathered through interception, monitoring and/or decryption with federal agencies, does not make an appearance in the official rules. Further, this necessity does not seem to be implied anywhere in the official 69 Rules.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/it-amendment-act-69-rules-draft-and-final-version-comparison'&gt;https://cis-india.org/internet-governance/blog/it-amendment-act-69-rules-draft-and-final-version-comparison&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>jdine</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    
    
        <dc:subject>Information Technology</dc:subject>
    

   <dc:date>2013-04-30T09:56:07Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/gni-and-iamai-launch-interactive-slideshow-exploring-impact-of-indias-internet-laws">
    <title>GNI and IAMAI Launch Interactive Slideshow Exploring Impact of India's Internet Laws </title>
    <link>https://cis-india.org/internet-governance/blog/gni-and-iamai-launch-interactive-slideshow-exploring-impact-of-indias-internet-laws</link>
    <description>
        &lt;b&gt;The Global Network Initiative and the Internet and Mobile Association of India have come together to explain how India’s Internet and technology laws impact economic innovation and freedom of expression. &lt;/b&gt;
        &lt;p&gt;The &lt;a class="external-link" href="http://www.globalnetworkinitiative.org/"&gt;Global Network Initiative (GNI)&lt;/a&gt;, and the &lt;a class="external-link" href="http://www.iamai.in/"&gt;Internet and Mobile Association of India (IAMAI)&lt;/a&gt; have launched an interactive slide show exploring the impact of existing Internet laws on users and businesses in India. The slide show created by Newsbound, and to which Centre for Internet and Society (CIS) has contributed its comments—explain the existing legislative mechanisms prevalent in India, map the challenges of the regulatory environment and highlight areas where such mechanisms can be strengthened.&lt;/p&gt;
&lt;p&gt;Foregrounding the difficulties of content regulation, the slides are aimed at informing users and the public of the constraints of current legal mechanisms in place, including safe harbour and take down and notice provisions. Highlighting Section 79(3) and the Intermediary Liability Rules issued in 2011, the slide show identifies some of the challenges faced by Internet platforms, such as the broad interpretation of the legislation by the executive branch.&lt;/p&gt;
&lt;p&gt;Challenges governing Internet platforms highlighted in the slide show include uniform Terms of Service that do not consider the type of service being provided by the platform, uncertain requirements for taking down content and compliance obligations related to information disclosure. Further, it highlights the issues of over compliance and misuse of the legal notice and take down system introduced under Section 79 of the Information Technology (Intermediaries Guidelines) Rules 2011.&lt;/p&gt;
&lt;p&gt;The Rules were created with the purpose of providing guidelines for the ‘post-publication redressal mechanism expression as envisioned in the Constitution of India'. However, since their introduction, the Rules have been criticised extensively, by both the national and the international media on account of not conforming to principles of natural justice and freedom of expression. Critics have pointed out that by not recognising the different functions performed by the different intermediaries and by not providing safeguards against misuse of such mechanism for suppressing legitimate expression, the Rules have a chilling effect on freedom of expression.&lt;/p&gt;
&lt;p&gt;Under the current Rules, the third party provider/creator of information is not given a chance to be heard by the intermediary, nor is there a requirement to give a reasoned decision by the intermediary to the creator whose content has been taken down. The take down procedure also does not have any provisions for restoring the removed information, such as providing a counter notice filing mechanism or appealing to a higher authority.  Further, the content criteria for removal of content includes terms like 'disparaging' and 'objectionable', which are not defined and prima facie seem to be beyond the reasonable restrictions envisioned by the Constitution of India. With uncertainty in content criteria and no safeguards to prevent abuse, complainants may send frivolous complaints and suppress legitimate expressions without any fear of repercussions.&lt;/p&gt;
&lt;p&gt;Most importantly, the redressal mechanism under the Rules shifts the burden of censorship, previously, the exclusive domain of the judiciary or the executive, and makes it the responsibility of private intermediaries. Often, private intermediaries, do not have sufficient legal resources to subjectively determine the legitimacy of a legal claim, resulting in over compliance to limit liability. The slide show cites  the &lt;a href="https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet"&gt;2011 CIS research carried out by Rishabh Dara&lt;/a&gt; to determine whether the Rules lead to a chilling effect on online free expression, towards highlighting the issue of over compliance and self censorship.&lt;/p&gt;
&lt;p&gt;The initiative is timely, given the change of guard in India, and stresses, not only the economic impact of fixing the Internet legal framework, but also the larger impact on users rights and freedom of expression. The initiative calls for a legal environment for the Internet that enables innovation, protects the rights of users, and provides clear rules and regulations for businesses large and small.&lt;/p&gt;
&lt;p&gt;See the slideshow here: &lt;a href="http://globalnetworkinitiative.org/india"&gt;How India’s Internet Laws Can Help Propel the Country Forward&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;&lt;strong&gt;Other GNI reports and resources: &lt;/strong&gt;&lt;/p&gt;
&lt;p&gt;&lt;a href="http://www.globalnetworkinitiative.org/sites/default/files/Closing%20the%20Gap%20-%20Copenhagen%20Economics_March%202014_0.pdf"&gt;Closing the Gap: Indian Online Intermediaries and a Liability System Not Yet Fit for Purpose&lt;/a&gt;&lt;/p&gt;
&lt;p&gt;&lt;a href="http://www.globalnetworkinitiative.org/sites/default/files/Closing%20the%20Gap%20-%20Copenhagen%20Economics_March%202014_0.pdf"&gt;Strengthening Protections for Online Platforms Could Add Billions to India’s GDP&lt;/a&gt;&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/gni-and-iamai-launch-interactive-slideshow-exploring-impact-of-indias-internet-laws'&gt;https://cis-india.org/internet-governance/blog/gni-and-iamai-launch-interactive-slideshow-exploring-impact-of-indias-internet-laws&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>jyoti</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Censorship</dc:subject>
    
    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    
    
        <dc:subject>Chilling Effect</dc:subject>
    
    
        <dc:subject>Information Technology</dc:subject>
    

   <dc:date>2014-07-17T12:01:01Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet">
    <title>Intermediary Liability in India: Chilling Effects on Free Expression on the Internet</title>
    <link>https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet</link>
    <description>
        &lt;b&gt;The Centre for Internet &amp;amp; Society in partnership with Google India conducted the Google Policy Fellowship 2011. This was offered for the first time in Asia Pacific as well as in India. Rishabh Dara was selected as a Fellow and researched upon issues relating to freedom of expression. The results of the paper demonstrate that the ‘Information Technology (Intermediaries Guidelines) Rules 2011’ notified by the Government of India on April 11, 2011 have a chilling effect on free expression.&lt;/b&gt;
        &lt;p style="text-align: justify; "&gt;Intermediaries are widely recognised as essential cogs in the wheel of exercising the right to freedom of expression on the Internet. Most major jurisdictions around the world have introduced legislations for limiting intermediary liability in order to ensure that this wheel does not stop spinning. With the 2008 amendment of the Information Technology Act 2000, India joined the bandwagon and established a ‘notice and takedown’ regime for limiting intermediary liability.&lt;br /&gt;&lt;br /&gt;On the 11th of April 2011, the Government of India notified the ‘Information Technology (Intermediaries Guidelines) Rules 2011’ that prescribe, amongst other things, guidelines for administration of takedowns by intermediaries. The Rules have been criticised extensively by both the national and the international media. The media has projected that the Rules, contrary to the objective of promoting free expression, seem to encourage privately administered injunctions to censor and chill free expression. On the other hand, the Government has responded through press releases and assured that the Rules in their current form do not violate the principle of freedom of expression or allow the government to regulate content.&lt;br /&gt;&lt;br /&gt;This study has been conducted with the objective of determining whether the criteria, procedure and safeguards for administration of the takedowns as prescribed by the Rules lead to a chilling effect on online free expression. In the course of the study, takedown notices were sent to a sample comprising of 7 prominent intermediaries and their response to the notices was documented. Different policy factors were permuted in the takedown notices in order to understand at what points in the process of takedown, free expression is being chilled.&lt;br /&gt;&lt;br /&gt;The results of the paper clearly demonstrate that the Rules indeed have a chilling effect on free expression. 
Specifically, the Rules create uncertainty in the criteria and procedure for administering the takedown thereby inducing the intermediaries to err on the side of caution and over-comply with takedown notices in order to limit their liability; and as a result suppress legitimate expressions. Additionally, the Rules do not establish sufficient safeguards to prevent misuse and abuse of the takedown process to suppress legitimate expressions.&lt;br /&gt;&lt;br /&gt;Of the 7 intermediaries to which takedown notices were sent, 6 intermediaries over-complied with the notices, despite the apparent flaws in them. From the responses to the takedown notices, it can be reasonably presumed that not all intermediaries have sufficient legal competence or resources to deliberate on the legality of an expression. Even if such intermediary has sufficient legal competence, it has a tendency to prioritize the allocation of its legal resources according to the commercial importance of impugned expressions. Further, if such subjective determination is required to be done in a limited timeframe and in the absence of adequate facts and circumstances, the intermediary mechanically (without application of mind or proper judgement) complies with the takedown notice.&lt;br /&gt;&lt;br /&gt;The results also demonstrate that the Rules are procedurally flawed as they ignore all elements of natural justice. The third party provider of information whose expression is censored is not informed about the takedown, let alone given an opportunity to be heard before or after the takedown. There is also no recourse to have the removed information put-back or restored. The intermediary is under no obligation to provide a reasoned decision for rejecting or accepting a takedown notice.&lt;/p&gt;
&lt;p&gt;The Rules in their current form clearly tilt the takedown mechanism in favour of the complainant and adversely against the creator of expression.&lt;/p&gt;
&lt;table class="plain"&gt;
&lt;tbody&gt;
&lt;tr&gt;
&lt;td&gt;The research highlights the need to:&lt;br /&gt; 
&lt;ul&gt;
&lt;li&gt; increase the safeguards against misuse of the privately administered takedown regime&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt;reduce the uncertainty in the criteria for administering the takedown&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt; reduce the uncertainty in the procedure for administering the takedown&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt; include various elements of natural justice in the procedure for administering the takedown&lt;/li&gt;
&lt;/ul&gt;
&lt;ul&gt;
&lt;li&gt;replace the requirement for subjective legal determination by intermediaries with an objective test&lt;/li&gt;
&lt;/ul&gt;
&lt;/td&gt;
&lt;/tr&gt;
&lt;/tbody&gt;
&lt;/table&gt;
&lt;p&gt;&lt;a href="https://cis-india.org/internet-governance/intermediary-liability-in-india.pdf" class="internal-link" title="Intermediary Liability in India"&gt;Click&lt;/a&gt; to download the report [PDF, 406 Kb]&lt;/p&gt;
&lt;hr /&gt;
&lt;h3&gt;Appendix 2&lt;/h3&gt;
&lt;ul&gt;
&lt;li&gt;&lt;a href="https://cis-india.org/internet-governance/intermediary-liability-and-foe-executive-summary.pdf" class="internal-link"&gt;Intermediary Liability and Freedom of Expression — Executive Summary&lt;/a&gt; (PDF, 263 Kb)&lt;/li&gt;
&lt;li&gt;&lt;a href="https://cis-india.org/internet-governance/counter-proposal-by-cis-draft-it-intermediary-due-diligence-and-information-removal-rules-2012.odt" class="internal-link"&gt;Counter-proposal by the Centre for Internet and Society: Draft Information Technology (Intermediary Due Diligence and Information Removal) Rules, 2012&lt;/a&gt; (Open Office Document, 231 Kb)&lt;/li&gt;
&lt;li&gt;&lt;a href="https://cis-india.org/internet-governance/counter-proposal-by-cis-draft-it-intermediary-due-diligence-and-information-removal-rules-2012.pdf" class="internal-link"&gt;Counter-proposal by the Centre for Internet and Society: Draft Information Technology (Intermediary Due Diligence and Information Removal) Rules, 2012&lt;/a&gt; (PDF, 422 Kb)&lt;/li&gt;
&lt;/ul&gt;
&lt;hr /&gt;
&lt;p&gt;The above documents have been sent to:&lt;/p&gt;
&lt;ol&gt;
&lt;li&gt;Shri Kapil Sibal, Minister of Human Resource Development and Minister of Communications and Information Technology&lt;/li&gt;
&lt;li&gt;Shri Milind Murli Deora, Minister of State of Communications and Information Technology&lt;/li&gt;
&lt;li&gt;Shri Sachin Pilot, Minister of State, Ministry of Communications and Information Technology&lt;/li&gt;
&lt;li&gt;Dr. Anita Bhatnagar, Joint Secretary, Department of Electronics &amp;amp; Information Technology, Ministry of Communications &amp;amp; Information Technology&lt;/li&gt;
&lt;li&gt;Dr. Ajay Kumar, Joint Secretary, Department of Electronics &amp;amp; Information Technology, Ministry of Communications &amp;amp; Information Technology&lt;/li&gt;
&lt;li&gt;Dr. Gulshan Rai, Scientist G &amp;amp; Group Coordinator, Director General, ICERT, Controller of Certifying Authorities and Head of Division, Cyber Appellate Tribunal &lt;/li&gt;
&lt;/ol&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet'&gt;https://cis-india.org/internet-governance/chilling-effects-on-free-expression-on-internet&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>Rishabh Dara</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Public Accountability</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Research</dc:subject>
    
    
        <dc:subject>Featured</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    
    
        <dc:subject>Censorship</dc:subject>
    

   <dc:date>2012-12-14T10:22:24Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/news/articles-latimes-com-mark-magnier-aug-23-2012-india-limits-social-media-after-civil-unrest">
    <title>India limits social media after civil unrest</title>
    <link>https://cis-india.org/news/articles-latimes-com-mark-magnier-aug-23-2012-india-limits-social-media-after-civil-unrest</link>
    <description>
        &lt;b&gt;Indian officials have gone too far in limiting text messages and pressuring local Internet firms as well as Twitter and others to block accounts, critics say.&lt;/b&gt;
        &lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;This article by Mark Magnier was published in &lt;a class="external-link" href="http://articles.latimes.com/2012/aug/23/world/la-fg-india-twitter-20120824"&gt;Los Angeles Times&lt;/a&gt; on August 23, 2012 and re-posted in &lt;a class="external-link" href="http://www.channel6newsonline.com/2012/08/after-civil-unrest-indian-government-places-limits-social-media/"&gt;Channel 6 News&lt;/a&gt; on August 24, 2012. Sunil Abraham is quoted.&lt;/p&gt;
&lt;hr /&gt;
&lt;p style="text-align: justify; "&gt;Has the Indian government lost its sense of humor?&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;That's what some in India were asking as word spread that authorities had pressured Twitter into blocking several accounts parodying the prime minister after civil unrest that saw dozens of people from northeastern India killed and thousands flee in panic.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;This week, the government also imposed a two-week limit of five text messages a day — raised Thursday to 20 — potentially affecting hundreds of millions of people, and pressured local Internet companies as well as Facebook, Twitter and Google to block hundreds of websites and user accounts.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Although journalists, free speech advocates and bloggers said the effort to squelch rumors may be justified, several criticized the actions as excessive.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;"You cannot burn the entire house to kill one mischievous mouse," said Gyana Ranjan Swain, a senior editor at Voice &amp;amp; Data, a networking trade magazine. "You're in the 21st century. Their thinking is still 50 years old. It's just 'kill the messenger.'"&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Comedians said Indian political humor is evolving and there's more leeway to make fun of politicians than a decade ago, but the nation's mores still call for greater respect than in the West.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;"If I tried something like South Park, I'd be put behind bars tomorrow," said Rahul Roushan, founder of Faking News website, which satirizes Indian current events.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Faking News has lampooned the recent corruption scandals, including specious stories about theme restaurants (where customers must bribe waiters or go hungry); and a tongue-in-cheek report that India has banned the zero because too many of them appear nowadays in auditors' reports, after recent coal and telecommunications scandals each allegedly involving more than $30 billion.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Roushan, whose site isn't blocked, said he hopes low-level officials misinterpreted government directives.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;"I'm still in a state of disbelief," he said. "I don't think the government is so stupid that it can ask that parody accounts get taken down. If they did, God help this country."&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;A spokesman for the prime minister's office said the blocking of six fake Twitter accounts attributed to the prime minister has been in the works for months and wasn't related to the recent crisis. He said the move was in response to tweets containing hate language and caste insults that readers could easily mistake as the Indian leader's. A dozen Twitter accounts and about 300 websites were blocked, according to news reports.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;"We have not lost our sense of humor," said Pankaj Pachauri, the prime minister's spokesman. "We started a procedure to take action against people misrepresenting themselves."&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;But some Twitter users whose accounts are frozen, including media consultant Kanchan Gupta, counter that the government may be using the crisis to muzzle critics.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;"I'm very clear in my mind this is a political decision," said Gupta, who has been critical of corruption and the government's policy drift. "If they were openly confrontational of me, they'd go nowhere, so they're trying this."&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Attempts to access his Twitter page Thursday were met with the message: "This website/URL has been blocked until further notice either pursuant to Court orders or on the Directions issued by the Department of Telecommunications."&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Even Britain's Queen Elizabeth II has numerous parody accounts so India needs to lighten up, consultant Gupta said.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;He's received several messages from worried Pakistani friends since the news broke. "They ask if I'm all right, say they hope they haven't frog-marched you to jail," he said. "What irony."&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The restrictions are the latest chapter of a crisis that started in July when Muslims and members of the Bodo tribal community in northeastern India clashed over land, jobs and politics. The result: 75 people killed and 300,000 displaced.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Muslims in Mumbai, formerly Bombay, staged a sympathy demonstration last week; two more people were killed and dozens injured.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Rumors, hate messages and altered photos of supposed atrocities against Muslims soon spread on social media sites, and several people from northeastern India were beaten in Bangalore and other cities, prompting the crackdown.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;New Delhi has accused Pakistani websites of fanning the online rumors. (Islamabad said it would investigate if there's any proof.) But Indian news media also reported that 20% of the websites blocked contained inflammatory material uploaded by Hindu nationalist groups in India that were apparently trying to stir up sectarian trouble.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;The Twitter community has responded with derision and humor to limits on text messages on prepaid cellphones.&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;"Feeling deeply insulted that I still have not been blocked," tweeted user @abhijitmajumder. "Victim of govt apathy."&lt;/p&gt;
&lt;p style="text-align: justify; "&gt;Sunil Abraham, head of the Bangalore civic group Center for Internet and Society, said this week's restrictions are the latest in a series of regulations and recommendations aimed at tightening Internet control.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/news/articles-latimes-com-mark-magnier-aug-23-2012-india-limits-social-media-after-civil-unrest'&gt;https://cis-india.org/news/articles-latimes-com-mark-magnier-aug-23-2012-india-limits-social-media-after-civil-unrest&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>praskrishna</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Social media</dc:subject>
    
    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>Public Accountability</dc:subject>
    
    
        <dc:subject>Internet Governance</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    
    
        <dc:subject>Censorship</dc:subject>
    

   <dc:date>2012-09-04T11:59:01Z</dc:date>
   <dc:type>News Item</dc:type>
   </item>


    <item rdf:about="https://cis-india.org/internet-governance/blog/rebuttal-dit-press-release-intermediaries">
    <title>Rebuttal of DIT's Misleading Statements on New Internet Rules</title>
    <link>https://cis-india.org/internet-governance/blog/rebuttal-dit-press-release-intermediaries</link>
    <description>
        &lt;b&gt;The press statement issued on May 11 by the Department of Information Technology (DIT) on the furore over the newly-issued rules on 'intermediary due diligence' is misleading and is, in places, plainly false.  We are presenting a point-by-point rebuttal of the DIT's claims.&lt;/b&gt;
        &lt;p&gt;In its &lt;a class="external-link" href="http://pib.nic.in/newsite/erelease.aspx?relid=72066"&gt;press release on Wednesday, May 11, 2011&lt;/a&gt;, the DIT stated:
&lt;blockquote&gt;The
 attention of Government has been drawn to news items in a section of 
media on certain aspects of the Rules notified under Section 79 
pertaining to liability of intermediaries under the Information 
Technology Act, 2000. These items have raised two broad issues. One is 
that words used in Rules for objectionable content are broad and could 
be interpreted subjectively. Secondly, there is an apprehension that the
 Rules enable the Government to regulate content in a highly subjective 
and possibly arbitrary manner. &lt;br /&gt;&lt;/blockquote&gt;
&lt;p&gt;There are actually more issues than merely "subjective interpretation" and "arbitrary governmental regulation".&lt;/p&gt;
&lt;ul&gt;&lt;li style="list-style-type: disc;"&gt;The
 Indian Constitution limits how much the government can regulate 
citizens’ fundamental right to freedom of speech and expression. Any 
measure afoul of the constitution is invalid. &lt;/li&gt;&lt;li style="list-style-type: disc;"&gt;Several
 portions of the rules are beyond the limited powers that Parliament had
 granted the Department of IT to create interpretive rules under the 
Information Technology Act. Parliament directed the Government to merely
 define what “due diligence” requirements an intermediary would have to 
follow in order to claim the qualified protection against liability that
 Section 79 of the Information Technology Act provides; these current 
rules have gone dangerously far beyond that, by framing rules that 
insist that intermediaries, without investigation, have to remove content within 36 hours of receipt of a 
complaint, keep records of users' details and provide them to 
law enforcement officials.&lt;/li&gt;&lt;/ul&gt;
&lt;p&gt;The Department of Information Technology (DIT), Ministry of 
Communications &amp;amp; IT has clarified that the Intermediaries Guidelines
 Rules, 2011 prescribe that due diligence need to be observed by the 
Intermediaries to enjoy exemption from liability for hosting any third 
party information under Section 79 of the Information Technology Act, 
2000. These due diligence practices are the best practices followed 
internationally by well-known mega corporations operating on the 
Internet. &amp;nbsp;The terms specified in the Rules are in accordance with the 
terms used by most of the Intermediaries as part of their existing 
practices, policies and terms of service which they have published on 
their website.&lt;/p&gt;
&lt;ol&gt;&lt;li&gt;We are not aware of any country that actually goes to the extent of 
deciding what Internet-wide ‘best practices’ are and actually converting
 those ‘best practices’ into law by prescribing a universal terms of 
service that all Internet services, websites, and products should enforce.&lt;/li&gt;&lt;li&gt;The Rules require all intermediaries to include the 
government-prescribed terms in an agreement, no matter what services 
they provide. It is one thing for a company to choose the terms of its 
terms of service agreement, and completely another for the government to
 dictate those terms of service. As long as the terms of service of an 
intermediary are not unlawful or bring up issues of users’ rights (such 
as the right to privacy), there is no reason for the government to jump 
in and dictate what the terms of service should or should not be.&lt;/li&gt;&lt;li&gt;The DIT has not offered any proof to back up its assertion that 'most' 
intermediaries already have such terms. &amp;nbsp;Google, a ‘mega corporation’ 
which is an intermediary, &lt;a class="external-link" href="http://www.google.com/accounts/TOS?hl=en"&gt;does not have such an overarching policy&lt;/a&gt;. &amp;nbsp;Indiatimes, another ‘mega 
corporation’ intermediary, &lt;a class="external-link" href="http://www.indiatimes.com/policyterms/1555176.cms"&gt;does not either&lt;/a&gt;. &amp;nbsp;Just because &lt;a class="external-link" href="http://www.rediff.com/termsofuse.html"&gt;a 
company like Rediff&lt;/a&gt; and &lt;a class="external-link" href="http://us.blizzard.com/en-us/company/legal/wow_tou.html"&gt;
Blizzard's World of Warcraft&lt;/a&gt; have some of those terms does not mean a) that they should have all of those terms, nor that b) everyone else should as well.&lt;br /&gt;&lt;br /&gt;In
 attempting to take different terms of service from different Internet 
services and products—the very fact of which indicate the differing 
needs felt across varying online communities—the Department has put in
 place a one-size-fits-all approach.&amp;nbsp; How can this be possible on the Internet, when we wouldn't regulate the post-office and a book publisher under the same rules of liability for, say, defamatory speech.&lt;/li&gt;&lt;li&gt;There is also a significant difference between the effect of those 
terms of service and that of these Rules.&amp;nbsp; An intermediary-framed terms of service 
suggest that the intermediary &lt;em&gt;may&lt;/em&gt; investigate and boot someone off a service for violation, while the Rules insist that 
the intermediary simply has to mandatorily remove content, keep records of users' details and provide them to law enforcement officials, 
else be subject to crippling legal liability.&lt;/li&gt;&lt;/ol&gt;
&lt;p&gt;So
 to equate the effect of these Rules to merely following ‘existing 
practices’ is plainly wrong. An intermediary—like the CIS website—should have the freedom to choose not to have terms of service 
agreements. We now don’t.&lt;/p&gt;
&lt;blockquote&gt;“In case any issue arises concerning the interpretation of the terms 
used by the Intermediary, which is not agreed to by the user or affected
 person, the same can only be adjudicated by a Court of Law. The 
Government or any of its agencies have no power to intervene or even 
interpret. DIT has reiterated that there is no intention of the 
Government to acquire regulatory jurisdiction over content under these 
Rules. It has categorically said that these rules do not provide for any
 regulation or control of content by the Government.”&lt;/blockquote&gt;
&lt;p&gt;The
 Rules are based on the presumption that all complaints (and resultant 
mandatory taking down of the content) are correct, and that the 
incorrectness of the take-downs can be disputed in court. &amp;nbsp;Why not just 
invert that, and presume that all complaints need to be proven first, and the correctness of the complaints (instead of the take-downs) be disputed in court? &amp;nbsp;&lt;/p&gt;
&lt;p&gt;Indeed,
 the courts have insisted that presumption of validity is the only 
constitutional way of dealing with speech. (See, for instance, &lt;em&gt;Karthikeyan R. v. Union 
of India&lt;/em&gt;, a 2010 Madras High Court judgment.)&lt;/p&gt;
&lt;p&gt;Further,
 only constitutional courts (namely High Courts and the Supreme Court) 
can go into the question of the validity of a law. &amp;nbsp;Other courts have to
 apply the law, even if the judge believes it is constitutionally 
invalid. &amp;nbsp;So, most courts will be forced to apply this law of highly 
questionable constitutionality until a High Court or the Supreme Court 
strikes it down.&lt;/p&gt;
&lt;p&gt;What
 the Department has in fact done is to explicitly open up the floodgates
 for increased liability claims and litigation - which runs exactly 
counter to the purpose behind the amendment of Section 79 by Parliament 
in 2008.&lt;/p&gt;
&lt;blockquote&gt;“The
 Government adopted a very transparent process for formulation of the 
Rules under the Information Technology Act. The draft Rules were 
published on the Department of Information Technology website for 
comments and were widely covered by the media. None of the Industry 
Associations and other stakeholders objected to the formulation which is
 now being cited in some section of media.”&lt;br /&gt;&lt;/blockquote&gt;
&lt;p&gt;This is a blatant lie.&lt;/p&gt;
&lt;p&gt;Civil
 society voices, including &lt;a href="https://cis-india.org/internet-governance/blog/2011/02/25/intermediary-due-diligence" class="external-link"&gt;CIS&lt;/a&gt;, &lt;a class="external-link" href="http://www.softwarefreedom.in/index.php?option=com_idoblog&amp;amp;task=viewpost&amp;amp;id=86&amp;amp;Itemid=70"&gt;Software Freedom Law Centre&lt;/a&gt;, and 
individual experts (such as the lawyer and published author &lt;a class="external-link" href="http://www.iltb.net/2011/02/draft-rules-on-intermediary-liability-released-by-the-ministry-of-it/"&gt;Apar Gupta&lt;/a&gt;) 
sent in comments. &amp;nbsp;Companies &lt;a class="external-link" href="http://online.wsj.com/article/SB10001424052748704681904576314652996232860.html?mod=WSJINDIA_hps_LEFTTopWhatNews"&gt;such as Google&lt;/a&gt;, &lt;a class="external-link" href="http://e2enetworks.com/2011/05/13/e2e-networks-response-to-draft-rules-for-intermediary-guidelines/"&gt;E2E Networks&lt;/a&gt;, and others had apparently 
raised concerns as well.&amp;nbsp; The press has published many a cautionary note, including editorials, op-ed and articles in &lt;a class="external-link" href="http://www.thehindu.com/opinion/lead/article1487299.ece"&gt;the&lt;/a&gt; &lt;a class="external-link" href="http://www.thehindu.com/opinion/editorial/article1515144.ece"&gt;Hindu&lt;/a&gt;, &lt;a class="external-link" href="http://www.thehoot.org/web/home/story.php?sectionId=6&amp;amp;mod=1&amp;amp;pg=1&amp;amp;valid=true&amp;amp;storyid=5163"&gt;the Hoot&lt;/a&gt;, Medianama.com, and Kafila.com, well before the new rules were notified.&amp;nbsp;  We at CIS even received a 'read notification' 
from the email account of the Group Coordinator of the DIT’s Cyber Laws 
Division—Dr. Gulshan Rai—on Thursday, March 3, 2011 at 12:04 PM (we had 
sent the mail to Dr. Rai on Monday, February 28, 2011). &amp;nbsp;We never 
received any acknowledgement, though, not even after we made an express 
request for acknowledgement (and an offer to meet them in person to 
explain our concerns) on Tuesday, April 5, 2011 in an e-mail sent to Mr.
 Prafulla Kumar and Dr. Gulshan Rai of DIT.&lt;/p&gt;
&lt;p&gt;The
 process can hardly be called 'transparent' when the replies received 
from 'industry associations and other stakeholders' have not been made 
public by the DIT. Those comments which are public all indicate that 
serious concerns were raised as to the constitutionality of the Rules.&lt;/p&gt;
&lt;p&gt;The Government has been forward looking to create a conducive 
environment for the Internet medium to catapult itself onto a different 
plane with the evolution of the Internet. The Government remains fully 
committed to freedom of speech and expression and the citizen’s rights 
in this regard.&lt;/p&gt;
&lt;p&gt;&lt;span id="internal-source-marker_0.8528041979429147"&gt;The DIT has limited this statement to the rules on intermediary due 
diligence, and has not spoken about the controversial new rules that 
stifle cybercafes, and restrict users' privacy and freedom to receive 
information.&lt;br /&gt;&lt;/span&gt;&lt;/p&gt;
&lt;p&gt;&lt;span id="internal-source-marker_0.8528041979429147"&gt;&lt;/span&gt;If
 the government is serious about creating a conducive environment for 
innovation, privacy and free expression on the Internet, then it wouldn’t be 
passing Rules that clamp down on them, and it definitely will not be 
doing so in such a non-transparent fashion.&lt;/p&gt;
        &lt;p&gt;
        For more details visit &lt;a href='https://cis-india.org/internet-governance/blog/rebuttal-dit-press-release-intermediaries'&gt;https://cis-india.org/internet-governance/blog/rebuttal-dit-press-release-intermediaries&lt;/a&gt;
        &lt;/p&gt;
    </description>
    <dc:publisher>No publisher</dc:publisher>
    <dc:creator>pranesh</dc:creator>
    <dc:rights></dc:rights>

    
        <dc:subject>Freedom of Speech and Expression</dc:subject>
    
    
        <dc:subject>IT Act</dc:subject>
    
    
        <dc:subject>Featured</dc:subject>
    
    
        <dc:subject>Intermediary Liability</dc:subject>
    

   <dc:date>2012-07-11T13:18:04Z</dc:date>
   <dc:type>Blog Entry</dc:type>
   </item>




</rdf:RDF>
